From 910ab550dd6db9aa7596aef9c86b01e1e99bbf0a Mon Sep 17 00:00:00 2001 From: petersilva Date: Tue, 27 Jun 2023 14:27:01 -0400 Subject: [PATCH 01/10] increment away from previous release --- debian/changelog | 6 ++++++ sarracenia/_version.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/debian/changelog b/debian/changelog index a23f4d0cd..33dd40b72 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +metpx-sr3 (3.00.42) UNRELEASED; urgency=medium + + * move away from previous release. + + -- Peter Silva Tue, 27 Jun 2023 14:26:23 -0400 + metpx-sr3 (3.00.41) unstable; urgency=medium * issue #700 nodupe_redis driver (experimental for now) diff --git a/sarracenia/_version.py b/sarracenia/_version.py index 363c5a148..3289bf158 100755 --- a/sarracenia/_version.py +++ b/sarracenia/_version.py @@ -1 +1 @@ -__version__ = "3.00.41" +__version__ = "3.00.42" From 2d943ca3e2869ec7372b41c536755ce29519332e Mon Sep 17 00:00:00 2001 From: Reid Sunderland Date: Tue, 27 Jun 2023 15:23:43 -0500 Subject: [PATCH 02/10] Fix for cases where downloaded file size is different than expected. For #709 (#710) When a file was downloaded successfully, but the size was different than in the message, or the message didn't include a file size, still rename the file and patch content_type. --- sarracenia/flow/__init__.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/sarracenia/flow/__init__.py b/sarracenia/flow/__init__.py index 41a78eb42..f3acbdc76 100644 --- a/sarracenia/flow/__init__.py +++ b/sarracenia/flow/__init__.py @@ -1835,13 +1835,6 @@ def download(self, msg, options) -> bool: if not self.o.dry_run: if accelerated: self.proto[self.scheme].update_file(new_inflight_path) - if (new_inflight_path != new_file): - if os.path.isfile(new_file): - os.remove(new_file) - os.rename(new_inflight_path, new_file) - # older versions don't include the contentType, so patch it here. 
- if 'contentType' not in msg: - msg['contentType'] = magic.from_file(new_file,mime=True) elif len_written < 0: logger.error("failed to download %s" % new_file) return False @@ -1865,8 +1858,18 @@ def download(self, msg, options) -> bool: 'incomplete download only %d of expected %d bytes for %s' % (len_written, block_length, new_inflight_path)) return False - - msg['size'] = len_written + # when len_written is different than block_length + msg['size'] = len_written + + # if we haven't returned False by this point, assuming download was successful + if (new_inflight_path != new_file): + if os.path.isfile(new_file): + os.remove(new_file) + os.rename(new_inflight_path, new_file) + + # older versions don't include the contentType, so patch it here. + if 'contentType' not in msg: + msg['contentType'] = magic.from_file(new_file,mime=True) self.metrics['flow']['transferRxBytes'] += len_written self.metrics['flow']['transferRxFiles'] += 1 From 2bfc089dcc1be3714471699a2e6a776d61f6de24 Mon Sep 17 00:00:00 2001 From: Peter Silva Date: Fri, 30 Jun 2023 15:19:46 -0400 Subject: [PATCH 03/10] updating config because of *destination* option refactor. --- sarracenia/examples/flow/amserver.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sarracenia/examples/flow/amserver.conf b/sarracenia/examples/flow/amserver.conf index 11d809364..4dc3d1a45 100644 --- a/sarracenia/examples/flow/amserver.conf +++ b/sarracenia/examples/flow/amserver.conf @@ -9,5 +9,5 @@ sum sha512 AllowIPs 127.0.0.1 AllowIPs 199.212.17.131/24 -destination am://0.0.0.0:5003 +sendTo am://0.0.0.0:5003 debug on From bd9d3ff88b1967051237092a00f0074f0aa49291 Mon Sep 17 00:00:00 2001 From: Elhussein Abdelhamid <65917467+Elhossein@users.noreply.github.com> Date: Thu, 6 Jul 2023 11:17:12 -0400 Subject: [PATCH 04/10] Resolves issue #712 (#713) Thanks very much! 
--- sarracenia/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sarracenia/__init__.py b/sarracenia/__init__.py index f9adcc636..b4de69053 100755 --- a/sarracenia/__init__.py +++ b/sarracenia/__init__.py @@ -261,6 +261,9 @@ def durationToSeconds(str_value, default=None) -> float: if type(str_value) in [int, float]: return str_value + + if type(str_value) is not str: + return 0 if str_value.lower() in [ 'none', 'off', 'false' ]: return 0 From 329cd83c18d264ce688326882724f223da0d78b9 Mon Sep 17 00:00:00 2001 From: Peter Silva Date: Sat, 15 Jul 2023 09:10:53 -0400 Subject: [PATCH 05/10] "possibly undeclared" option message should only be printed once. --- sarracenia/config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sarracenia/config.py b/sarracenia/config.py index eca333d59..eb17d35cf 100755 --- a/sarracenia/config.py +++ b/sarracenia/config.py @@ -1569,7 +1569,8 @@ def parse_file(self, cfg, component=None): setattr(self, k, v) else: #FIXME: with _options lists for all types and addition of declare, this is probably now dead code. 
- logger.debug('possibly undeclared option: %s' % line ) + if k not in self.undeclared: + logger.debug('possibly undeclared option: %s' % line ) v = ' '.join(line[1:]) if hasattr(self, k): if type(getattr(self, k)) is float: From b3755127f8f0d50b5ddf3e49ad8ef38e440e62ee Mon Sep 17 00:00:00 2001 From: Greg <129239095+gcglinton@users.noreply.github.com> Date: Tue, 25 Jul 2023 17:00:08 -0400 Subject: [PATCH 06/10] Fix a couple of problems in init (#727) These were found when developing unit tests --- sarracenia/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sarracenia/__init__.py b/sarracenia/__init__.py index b4de69053..14e3337dd 100755 --- a/sarracenia/__init__.py +++ b/sarracenia/__init__.py @@ -35,6 +35,7 @@ import os import os.path import paramiko +import random import re import sarracenia.filemetadata import stat as os_stat @@ -340,7 +341,7 @@ def __computeIdentity(msg, path, o): methods = [ 'random', 'md5', 'md5name', 'sha512', 'cod,md5', 'cod,sha512' ] - calc_method = choice(methods) + calc_method = random.choice(methods) elif 'identity' in xattr.x and 'mtime' in xattr.x: if xattr.get('mtime') >= msg['mtime']: logger.debug("mtime remembered by xattr") @@ -575,7 +576,7 @@ def fromFileInfo(path, o, lstat=None): 'value': o.identity_method[4:] } elif o.identity_method in ['random']: - algo = sarracenia.identity.Indentiy.factory(o.identity_method) + algo = sarracenia.identity.Identity.factory(o.identity_method) algo.set_path(post_relPath) msg['identity'] = { 'method': o.identity_method, From e401ab3a4286c342ae33082865560cb69dc5522e Mon Sep 17 00:00:00 2001 From: Greg <129239095+gcglinton@users.noreply.github.com> Date: Wed, 26 Jul 2023 09:10:20 -0400 Subject: [PATCH 07/10] Sarracenia __init__ unit test (#730) * New sarracenia init tests * Add pytest-mock requirement for unit tests * Sarracenia init unit test This has about 90% coverage * Update tests README with extra info on config Debugging tests wasn't working, and it's 
because generating code coverage while debugging breaks debugging. * Add try/except around init tests that use xattr module Apparently it's not a standard module everywhere, so this is a workaround Could also just use SR3's filemetadata methods, as they handle things "nicely" --- tests/README.md | 27 +- tests/requirements.txt | 1 + tests/sarracenia/__init___test.py | 521 ++++++++++++++++++++++++++++++ 3 files changed, 548 insertions(+), 1 deletion(-) create mode 100644 tests/sarracenia/__init___test.py diff --git a/tests/README.md b/tests/README.md index 07112f57d..382c000f3 100644 --- a/tests/README.md +++ b/tests/README.md @@ -55,12 +55,15 @@ If you want to run this in VSCode, and have it do all the things nicely, you'll - [GitLens — Git supercharged](https://marketplace.visualstudio.com/items?itemName=eamodio.gitlens) Not strictly required, but *very* strongly recommended as it makes VS Code's git features fully functional -Beyond that, changing a few options in your settings file will make it all work; thusly: +Beyond that, changing a few things in your VS Code configs will make it all work. 
+ +In `settings.json`, to get all the reports and coverage when running tests, and allow you to run individual tests even if they have dependencies: ```json { "python.testing.pytestArgs": [ "tests", "-v", "--cov-config=tests/.coveragerc", "--cov=sarracenia", "--cov-report=xml", "--cov-report=html", + "--html=tests/report.html", "--self-contained-html", "--failed-dependency-action=run", "--missing-dependency-action=run" ], "python.testing.unittestEnabled": false, @@ -69,6 +72,28 @@ Beyond that, changing a few options in your settings file will make it all work; } ``` + +In `launch.json` (per [documentation](https://code.visualstudio.com/docs/python/testing#_debug-tests)), to enable full debugging support in your tests: +```json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Debug Tests", + "type": "python", + "request": "launch", + "program": "${file}", + "purpose": ["debug-test"], + "console": "integratedTerminal", + "justMyCode": false, + "env": {"PYTEST_ADDOPTS": "--no-cov"} + } + ] +} +``` + +**NOTE:** Don't just squash whatever you have in `settings.json`, or `launch.json`, but use some common sense to merge what's above into your existing files. + ## Docker You can also run the exact same tests from within a Docker container if you want to avoid having to (re)-provision clean installs. 
diff --git a/tests/requirements.txt b/tests/requirements.txt index d671f0295..ee7a8d51c 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -3,6 +3,7 @@ pytest-cov>=4.0 pytest-bug>=1.2 pytest-depends>=1.0 pytest-html>=3.2 +pytest-mock>=3.11 python-redis-lock>=4 fakeredis>=2.11 diff --git a/tests/sarracenia/__init___test.py b/tests/sarracenia/__init___test.py new file mode 100644 index 000000000..1f9ec63f5 --- /dev/null +++ b/tests/sarracenia/__init___test.py @@ -0,0 +1,521 @@ +import pytest +#from unittest.mock import Mock + +import os +from base64 import b64decode +#import urllib.request +import logging + +import sarracenia +import sarracenia.config + +#useful for debugging tests +import pprint +pretty = pprint.PrettyPrinter(indent=2, width=200).pprint + + +logger = logging.getLogger('sarracenia') +logger.setLevel('DEBUG') + +def test_baseUrlParse(): + parsed = sarracenia.baseUrlParse('http://hostname.com/a/deep/path/file.txt?query=val') + assert parsed.scheme == "http" + assert parsed.query == "query=val" + + parsed = sarracenia.baseUrlParse('file:////opt/foobar/file.txt') + assert parsed.scheme == "file" + assert parsed.path == "/opt/foobar/file.txt" + + +def test_timev2tov3str(): + assert sarracenia.timev2tov3str('20230710T120000.123') == '20230710T120000.123' + assert sarracenia.timev2tov3str('20230710120000.123') == '20230710T120000.123' + + +def test_durationToSeconds(): + assert sarracenia.durationToSeconds('none') == sarracenia.durationToSeconds('off') == sarracenia.durationToSeconds('false') == 0.0 + assert sarracenia.durationToSeconds('on', default=10) == sarracenia.durationToSeconds('true', default=10) == 10.0 + + assert sarracenia.durationToSeconds('1s') == sarracenia.durationToSeconds('1S') == 1.0 + assert sarracenia.durationToSeconds('2m') == sarracenia.durationToSeconds('2M') == 120.0 + assert sarracenia.durationToSeconds('3h') == sarracenia.durationToSeconds('3H') == 10800.0 + assert sarracenia.durationToSeconds('4d') == 
sarracenia.durationToSeconds('4D') == 345600.0 + assert sarracenia.durationToSeconds('1w') == sarracenia.durationToSeconds('1W') == 604800.0 + assert sarracenia.durationToSeconds('0.5h') == sarracenia.durationToSeconds('0.5H') == 1800.0 + + assert sarracenia.durationToSeconds('invalid') == 0.0 + assert sarracenia.durationToSeconds(b'5') == 0.0 + assert sarracenia.durationToSeconds([5]) == 5.0 + + assert sarracenia.durationToSeconds(2.5) == 2.5 + assert sarracenia.durationToSeconds('1s', default=None) == 1.0 + assert sarracenia.durationToSeconds('1y') == 1.0 + assert sarracenia.durationToSeconds('-1s') == -1.0 + assert sarracenia.durationToSeconds('-1.5h') == -5400.0 + + +def test_timeValidate(): + assert sarracenia.timeValidate('20230710120000') == True + assert sarracenia.timeValidate('2023-07-10T12:00:00') == False + assert sarracenia.timeValidate('2023-07-10T12:00:00.') == False + assert sarracenia.timeValidate('20230710120000.123') == True + assert sarracenia.timeValidate('2023-07-10T12:00:00.123') == False + assert sarracenia.timeValidate('2023-07-10T12:00:00Z') == False + assert sarracenia.timeValidate('2023-07-10T12:00:00.123Z') == False + assert sarracenia.timeValidate('20230710120000Z') == False + + assert sarracenia.timeValidate('20230710T10:11:00000') == False + assert sarracenia.timeValidate('!0230710120000') == False + + assert sarracenia.timeValidate('') == False + assert sarracenia.timeValidate('2023-07-10T12:00:00.1234') == False + assert sarracenia.timeValidate('2023-07-10T12:00:00.123Za') == False + assert sarracenia.timeValidate('2023-07-10T12:00:00.!@#$%') == False + assert sarracenia.timeValidate('2023-07-10T12:00:00. 
') == False + + +def test_timeflt2str(): + assert sarracenia.timeflt2str(1632180811.123456) == '20210920T233331.123456001' + assert sarracenia.timeflt2str(1632180811.00123) == '20210920T233331.00123000145' + assert sarracenia.timeflt2str(1632180811) == '20210920T233331' + assert sarracenia.timeflt2str(0) == '19700101T000000' + assert sarracenia.timeflt2str(1234567890.123) == '20090213T233130.122999907' + assert sarracenia.timeflt2str(1625452800.5) == '20210705T024000.5' + + +def test_timestr2flt(): + assert sarracenia.timestr2flt('20210920T233331.123456') == 1632180811.123456 + assert sarracenia.timestr2flt('20210920T233331.00123') == 1632180811.00123 + assert sarracenia.timestr2flt('20210920T233331') == 1632180811.0 + assert sarracenia.timestr2flt('19700101T000000') == 0.0 + assert sarracenia.timestr2flt('19700101000000') == 0.0 + assert sarracenia.timestr2flt('20090213T233130.123') == 1234567890.123 + assert sarracenia.timestr2flt('20210705T024000.5') == 1625452800.5 + + +@pytest.mark.depends(on=['test_timestr2flt']) +def test_nowstr(): + import time + assert time.time() - sarracenia.timestr2flt(sarracenia.nowstr()) < 0.001 + + +@pytest.mark.depends(on=['test_nowstr']) +def test_nowflt(): + import time + assert time.time() - sarracenia.nowflt() < 0.001 + +# def test_naturalSize(): +# if sarracenia.extras['humanize']['present'] == True: +# assert sarracenia.naturalSize(1024) == '1.0 KiB' +# elif sarracenia.extras['humanize']['present'] == False: +# assert sarracenia.naturalSize(1024) == '1024' + +# def test_naturalTime(): +# if sarracenia.extras['humanize']['present'] == True: +# assert sarracenia.naturalTime(1024) == '17 minutes ago' +# elif sarracenia.extras['humanize']['present'] == False: +# assert sarracenia.naturalTime(1024) == '1024' + +@pytest.fixture +def message(): + msg = sarracenia.Message() + msg['_format'] = 'v03' + msg['baseUrl'] = 'https://example.com' + msg['relPath'] = 'path/to/file.txt' + return msg + + +class Test_Message(): + + def 
test___computeIdentity(self, tmp_path, mocker, caplog): + # Set 1 + path1 = str(tmp_path) + os.sep + "file1.txt" + open(path1, 'a').close() + options = sarracenia.config.default_config() + msg = sarracenia.Message() + msg['mtime'] = sarracenia.nowstr() + msg['size'] = 0 + #msg['mtime'] = sarracenia.timeflt2str(sarracenia.timestr2flt(msg['mtime']) + 1000) + msg._Message__computeIdentity(path1, options) + assert msg['identity']['method'] == options.identity_method + + # Set 2 + path2 = str(tmp_path) + os.sep + "file2.txt" + open(path2, 'a').close() + options = sarracenia.config.default_config() + options.randomize = True + msg = sarracenia.Message() + msg['mtime'] = sarracenia.nowstr() + msg['size'] = 0 + mocker.patch('random.choice', return_value=None) + msg._Message__computeIdentity(path2, options) + assert 'identity' not in msg + + # Set 3 - tests random, and algorithm == None + path3 = str(tmp_path) + os.sep + "file3.txt" + open(path3, 'a').close() + options = sarracenia.config.default_config() + options.randomize = True + msg = sarracenia.Message() + msg['mtime'] = sarracenia.nowstr() + msg['size'] = 0 + mocker.patch('random.choice', return_value=None) + msg._Message__computeIdentity(path3, options) + assert 'identity' not in msg + + # Set 4 - abitrary method + path4 = str(tmp_path) + os.sep + "file6.txt" + open(path4, 'a').close() + options = sarracenia.config.default_config() + options.identity_method = 'arbitrary' + options.identity_arbitrary_value = "identity_arbitrary_value" + msg = sarracenia.Message() + msg['mtime'] = sarracenia.nowstr() + msg['size'] = 0 + msg._Message__computeIdentity(path4, options) + assert msg['identity']['value'] == 'identity_arbitrary_value' + + # Set 4a - random + options.identity = 'random' + options.identity_method = 'random' + del(msg['identity']) + msg._Message__computeIdentity(path4, options) + assert msg['identity']['method'] == 'random' + + # Set 4b - 'cod,*' method + options.identity = 'cod,testname' + 
options.identity_method = 'cod,testname' + del(msg['identity']) + msg._Message__computeIdentity(path4, options) + assert msg['identity'] == 'cod,testname' + + try: + import xattr + + # Set 5 - with identity/mtime xattrs *and* old mtime + path5 = str(tmp_path) + os.sep + "file5.txt" + open(path5, 'a').close() + xattr_mtime = sarracenia.timeflt2str(sarracenia.nowflt() - 1000) + xattr.setxattr(path5, b'user.sr_identity', b'xattr_identity_value') + xattr.setxattr(path5, b'user.sr_mtime', bytes(xattr_mtime, 'utf-8')) + options = sarracenia.config.default_config() + msg = sarracenia.Message() + msg['mtime'] = sarracenia.nowstr() + msg['size'] = 0 + msg._Message__computeIdentity(path5, options) + assert msg['identity']['method'] == options.identity_method + assert xattr.getxattr(path5, b'user.sr_mtime').decode('utf-8') == msg['mtime'] + # Set 5a - Cover cases where the identity on disk is different than what's configured + options.identity = 'md5name' + options.identity_method = 'md5name' + msg._Message__computeIdentity(path5, options) + found_log_set5 = False + for record in caplog.records: + if "xattr different method than on disk" in record.message: + found_log_set5 = True + assert found_log_set5 == True + + # Set 6 - with identity/mtime xattrs + path6 = str(tmp_path) + os.sep + "file4.txt" + open(path6, 'a').close() + msg_time = sarracenia.nowstr() + xattr.setxattr(path6, b'user.sr_identity', b'{"method": "sha512", "value": "xattr_identity_value"}') + xattr.setxattr(path6, b'user.sr_mtime', bytes(msg_time, 'utf-8')) + options = sarracenia.config.default_config() + msg = sarracenia.Message() + msg['mtime'] = msg_time + msg['size'] = 0 + msg._Message__computeIdentity(path6, options) + assert msg['identity']['value'] == 'xattr_identity_value' + except: + pass + + + @pytest.mark.depends(on=['test_fromFileInfo']) + def test_fromFileData(self, tmp_path): + path = str(tmp_path) + os.sep + "file.txt" + pathlink = str(tmp_path) + os.sep + "link.txt" + open(path, 'a').close() + 
os.symlink(path, pathlink) + o = sarracenia.config.default_config() + + # Test regular file + msg1 = sarracenia.Message.fromFileData(path, o, os.lstat(path)) + assert msg1['_format'] == 'v03' + assert len(msg1['_deleteOnPost']) == 10 + assert msg1['local_offset'] == 0 + + msg2 = sarracenia.Message.fromFileData(str(tmp_path), o, os.lstat(str(tmp_path))) + assert msg2['contentType'] == 'text/directory' + + msg3 = sarracenia.Message.fromFileData(pathlink, o, os.lstat(pathlink)) + assert msg3['contentType'] == 'text/link' + + msg4 = sarracenia.Message.fromFileData('/dev/null', o, os.lstat('/dev/null')) + assert msg4['size'] == 0 + + msg5 = sarracenia.Message.fromFileData('/dev/null', o) + assert "size" not in msg5.keys() + + + def test_fromFileInfo(self, tmp_path): + # Set 1 + path = str(tmp_path) + os.sep + "file1.txt" + open(path, 'a').close() + options = sarracenia.config.default_config() + + msg = sarracenia.Message.fromFileInfo(path, options, None) + assert msg['_format'] == 'v03' + assert len(msg['_deleteOnPost']) == 10 + assert msg['local_offset'] == 0 + + # Set 2 + path = str(tmp_path) + os.sep + "file2.txt" + open(path, 'a').close() + options = sarracenia.config.default_config() + options.permCopy = True + options.timeCopy = False + options.to_clusters = "to_clusters" + options.cluster = "from_cluster" + options.source = "source" + options.identity_method = '' + delattr(options, 'post_format') + msg = sarracenia.Message.fromFileInfo(path, options, os.lstat(path)) + assert msg['to_clusters'] == 'to_clusters' + assert msg['from_cluster'] == 'from_cluster' + assert msg['source'] == 'source' + assert msg['_format'] == 'v03' + + # Set 3 + options = sarracenia.config.default_config() + options.strip = 1 + options.identity_method = 'random' + options.post_format = 'post_format' + options.exchange = '' + options.post_exchange = 'post_exchange' + msg = sarracenia.Message.fromFileInfo(str(tmp_path), options, os.lstat(tmp_path)) + assert msg['rename'] == os.sep + 
os.path.relpath(tmp_path, '/tmp') + assert msg['_format'] == 'post_format' + assert msg['exchange'] == 'post_exchange' + assert msg['identity']['method'] == 'random' + + # Set 4 + path = str(tmp_path) + os.sep + "file4.txt" + open(path, 'a').close() + options = sarracenia.config.default_config() + options.strip = 20 + options.identity_method = 'cod,identityValue' + delattr(options, 'post_format') + options.post_topicPrefix = ['v02'] + msg = sarracenia.Message.fromFileInfo(path, options, os.lstat(path)) + assert msg['rename'] == "/" + assert msg['_format'] == "v02" + assert msg['identity'] == {'method': 'cod', 'value': 'identityValue' } + + #Set 5 + path = str(tmp_path) + os.sep + "file5.txt" + open(path, 'a').close() + options = sarracenia.config.default_config() + options.rename = str(tmp_path) + os.sep + "file4a.txt" + with pytest.raises(KeyError): + msg = sarracenia.Message.fromFileInfo(path, options, os.lstat(path)) + + + @pytest.mark.depends(on=['test_fromFileData']) + def test_fromStream(self, tmp_path): + path = str(tmp_path) + os.sep + "file.txt" + open(path, 'a').close() + o = sarracenia.config.default_config() + + data = b"Hello, World!" 
+ + # Test fromStream method + msg = sarracenia.Message.fromStream(path, o, data) + assert msg['_format'] == 'v03' + assert len(msg['_deleteOnPost']) == 10 + assert msg['local_offset'] == 0 + + # Test with chmod + o.chmod = 0o700 + msg = sarracenia.Message.fromStream(path, o, data) + assert oct(os.stat(path).st_mode)[-3:] == '700' + + + @pytest.mark.depends(on=['sarracenia/__init___test.py::test_baseUrlParse']) + def test_updatePaths(self, tmp_path, mocker): + path = str(tmp_path) + os.sep + "file.txt" + open(path, 'a').close() + new_file = "newfile.txt" + new_dir = str(tmp_path) + os.sep + "new" + + #Test set 1 + options = sarracenia.config.default_config() + msg = sarracenia.Message() + with pytest.raises(Exception): + msg.updatePaths(options) + + msg = sarracenia.Message() + msg.updatePaths(options, new_dir, new_file) + assert msg['_deleteOnPost'] == set([ + 'new_dir', 'new_file', 'new_relPath', 'new_baseUrl', 'new_subtopic', 'post_format', '_format' + ]) + assert msg['new_dir'] == new_dir + assert msg['new_file'] == new_file + + #Test set 2 + pretty(options.post_format) + options = sarracenia.config.default_config() + options.post_baseUrl = 'https://post_baseurl.com' + options.fixed_headers = {'fixed_headers__Key1': 'fixed_headers__Val1'} + msg.updatePaths(options, new_dir, new_file) + assert msg['fixed_headers__Key1'] == 'fixed_headers__Val1' + assert msg['post_format'] == 'v03' + + #Test set 3 + options = sarracenia.config.default_config() + options.post_format = '' + options.post_topicPrefix = 'post_topicPrefix' + options.post_baseDir = str(tmp_path) + msg = sarracenia.Message() + msg['baseUrl'] = 'baseUrl' + msg.updatePaths(options, new_dir, new_file) + assert msg['new_baseUrl'] == 'baseUrl' + assert msg['post_format'] == 'p' + + #Test set 4 + options = sarracenia.config.default_config() + options.post_format = '' + options.post_topicPrefix = '' + options.topicPrefix = 'topicPrefix' + options.post_baseDir = 'post_baseDir' + msg = sarracenia.Message() + 
msg['baseUrl'] = 'baseUrl' + msg.updatePaths(options, new_dir, new_file) + assert msg['post_format'] == 't' + + #Test set 5 + options = sarracenia.config.default_config() + options.post_format = '' + options.post_topicPrefix = '' + options.topicPrefix = msg['_format'] + options.post_baseDir = 'post_baseDir' + msg = sarracenia.Message() + msg['baseUrl'] = '/this/is/a/path' + msg.updatePaths(options, '/this/is/a/path/new', new_file) + assert msg['new_baseUrl'] == '/this/is/a/path' + assert msg['post_format'] == msg['_format'] + + # Test set 6 + options = sarracenia.config.default_config() + options.post_baseUrl = 'https://post_baseurl.com' + msg = sarracenia.Message() + mocker.patch('sys.platform', 'win32') + msg.updatePaths(options, '\\this\\is\\a\\path\\new', new_file) + assert msg['new_relPath'] == '/this/is/a/path/new/newfile.txt' + options.currentDir = 'Z:' + msg.updatePaths(options, '\\this\\is\\a\\path\\new', new_file) + assert msg['new_relPath'] == 'this/is/a/path/new/newfile.txt' + + + def test_setReport(self): + msg = sarracenia.Message() + + # Test setReport method + msg.setReport(201, "Download successful") + assert 'report' in msg + assert msg['report']['code'] == 201 + assert msg['report']['message'] == "Download successful" + + msg.setReport(304) + assert msg['report']['message'] == sarracenia.known_report_codes[304] + + msg.setReport(418) + assert msg['report']['message'] == "unknown disposition" + + msg.setReport(418, "I'm a teapot") + assert msg['report']['message'] == "I'm a teapot" + + # Add more assertions for other fields in the message + + + @pytest.mark.depends(on=['sarracenia/__init___test.py::test_timeValidate']) + def test_validate(self, message): + + assert sarracenia.Message.validate('string') == False + + with pytest.raises(KeyError): + assert sarracenia.Message.validate(message) == False + + message['pubTime'] = '' + assert sarracenia.Message.validate(message) == False + + message['pubTime'] = '20230710120000' + assert 
sarracenia.Message.validate(message) == True + + + def test_getContent(self, mocker): + msg = sarracenia.Message() + + msg['content'] = { + 'encoding': '', + 'value': "sarracenia/_version.py" + } + assert msg.getContent() == b"sarracenia/_version.py" + + msg['content'] = { + 'encoding': 'base64', + 'value': 'c2FycmFjZW5pYS9fdmVyc2lvbi5weQ==' + } + # Test getContent method with inlined/embedded content + assert msg.getContent() == b"sarracenia/_version.py" + + expected_content = "sarracenia/_version.py" + import io + mocker.patch('urllib.request.urlopen', return_value=io.StringIO(expected_content)) + msg = sarracenia.Message() + msg['baseUrl'] = "https://NotARealURL.123" + msg['retrievePath'] = "MetPX/sarracenia/main/VERSION.txt" + assert msg.getContent() == expected_content + + + def test_copyDict(self, message): + message.copyDict(None) + assert message['_format'] == 'v03' + + message.copyDict({'foobar': 'baz'}) + assert message['foobar'] == 'baz' + + + def test_dumps(self, message): + # Test dumps method + assert message.dumps() == "{ '_deleteOnPost':'{'_format'}', '_format':'v03', 'baseUrl':'https://example.com', 'relPath':'path/to/file.txt' }" + + assert sarracenia.Message.dumps(None) == '' + + message['_format'] = 'v04' + message['properties'] = {'prop1': 'propval1'} + assert message.dumps() == "{ '_deleteOnPost':'{'_format'}', '_format':'v04', 'baseUrl':'https://example.com', 'properties':'https://example.com 'prop1':'propval1'', 'relPath':'path/to/file.txt' }" + + message['id'] = "id111" + del message['properties'] + assert message.dumps() == "{ '_deleteOnPost':'{'_format'}', '_format':'v04', 'baseUrl':'https://example.com', 'relPath':'path/to/file.txt' }" + + message['_format'] = 'Wis' + del message['id'] + assert message.dumps() == "{ 'geometry': None, 'properties':{ '_deleteOnPost':'{'_format'}', '_format':'Wis', 'baseUrl':'https://example.com', 'relPath':'path/to/file.txt', } }" + + message['id'] = "id111" + message['geometry'] = "geometry111" + 
message['testdict'] = {'key1': 'val1', 'key2': 'val2'} + assert message.dumps() == "{ { 'id': 'id111', 'type':'Feature', 'geometry':geometry111 'properties':{ '_deleteOnPost':'{'_format'}', '_format':'Wis', 'baseUrl':'https://example.com', 'geometry':'geometry111', 'id':'id111', 'relPath':'path/to/file.txt', 'testdict':'{ 'key1':'val1', 'key2':'val2' }', } }" + + message['longfield'] = "hacskmbeponlfkfcmxxasoxjgrodcmovxbkzgnfxqimkmxshaztwsptqbulazgszjyiqoqasyukgjejtbrbeufvfdrxlurglhlszdehigvctczjtleadkpeycunthwzwdbxybhbewgcclljkebtwueldbhximikfbtgapiklmqzceyqlilebchekrxmvhfflaclqjddfrhicdttaabkfkhbwylnzyneattcjsgpordersenmbzyjeaybtyyahsde" + assert message.dumps() == "{ { 'id': 'id111', 'type':'Feature', 'geometry':geometry111 'properties':{ '_deleteOnPost':'{'_format'}', '_format':'Wis', 'baseUrl':'https://example.com', 'geometry':'geometry111', 'id':'id111', 'longfield':'hacskmbeponlfkfcmxxasoxjgrodcmovxbkzgnfxqimkmxshaztwsptqbulazgszjyiqoqasyukgjejtbrbeufvfdrxlurglhlszdehigvctczjtleadkpeycunthwzwdbxybhbewgcclljkebtwueldbhximikfbtgapiklmqzceyqlilebchekrxmvhfflaclqjddfrhicdttaabkfkhbwylnzyneattcjsgpordersenmbzyjeaybtyy...', 'relPath':'path/to/file.txt', 'testdict':'{ 'key1':'val1', 'key2':'val2' }', } }" + + message['longfield'] = "{hacskmbeponlfkfcmxxasoxjgrodcmovxbkzgnfxqimkmxshaztwsptqbulazgszjyiqoqasyukgjejtbrbeufvfdrxlurglhlszdehigvctczjtleadkpeycunthwzwdbxybhbewgcclljkebtwueldbhximikfbtgapiklmqzceyqlilebchekrxmvhfflaclqjddfrhicdttaabkfkhbwylnzyneattcjsgpordersenmbzyjeaybtyyahsde}" + assert message.dumps() == "{ { 'id': 'id111', 'type':'Feature', 'geometry':geometry111 'properties':{ '_deleteOnPost':'{'_format'}', '_format':'Wis', 'baseUrl':'https://example.com', 'geometry':'geometry111', 'id':'id111', 
'longfield':'{hacskmbeponlfkfcmxxasoxjgrodcmovxbkzgnfxqimkmxshaztwsptqbulazgszjyiqoqasyukgjejtbrbeufvfdrxlurglhlszdehigvctczjtleadkpeycunthwzwdbxybhbewgcclljkebtwueldbhximikfbtgapiklmqzceyqlilebchekrxmvhfflaclqjddfrhicdttaabkfkhbwylnzyneattcjsgpordersenmbzyjeaybty...}', 'relPath':'path/to/file.txt', 'testdict':'{ 'key1':'val1', 'key2':'val2' }', } }" + + + + + From 10d730a5c09fcafacd66a43d57194ed806776d8e Mon Sep 17 00:00:00 2001 From: Greg <129239095+gcglinton@users.noreply.github.com> Date: Wed, 26 Jul 2023 18:02:36 -0400 Subject: [PATCH 08/10] Update unit testing patterns (#731) * Expand test debugging (pretty) Pretty printing wasn't awesome, but now it's better. * Move Options into a proper class * Move message creation into method This ensures that there's no collisions between tests if we modify properties/dict values * Use deepcopy on WorkList creation in each test This ensures that they're always unique, and have no collisions between tests * Fix broken retry tests * Add Note to top of retry "steps" test It's disabled, but can serve as good documentation of how to do it. * Capture test log existance, and assert against that It used to assert inside the "if" statement, but that would only ever be True. This way, it would be possible for the logs to be missing, and the test will fail. * Fix redisqueue cleanup test No longer need to specify a fake server, becasue we're checking the keys a little more inteligently. * Clean up commented code * Expand test debugging (pretty) Pretty printing wasn't awesome, but now it's better. * Move Options into a proper class * Move message creation into method This ensures that there's no collisions between tests if we modify properties/dict values * Use deepcopy on WorkList creation in each test This ensures that they're always unique, and have no collisions between tests * Fix broken retry tests * Add Note to top of retry "steps" test It's disabled, but can serve as good documentation of how to do it. 
* Capture test log existence, and assert against that It used to assert inside the "if" statement, but that would only ever be True. This way, it would be possible for the logs to be missing, and the test will fail. * Fix redisqueue cleanup test No longer need to specify a fake server, because we're checking the keys a little more intelligently. * Clean up commented code --- tests/sarracenia/diskqueue_test.py | 133 ++++++++---- tests/sarracenia/flowcb/nodupe/data_test.py | 9 +- tests/sarracenia/flowcb/nodupe/disk_test.py | 9 +- tests/sarracenia/flowcb/nodupe/name_test.py | 9 +- tests/sarracenia/flowcb/nodupe/nodupe_test.py | 9 +- tests/sarracenia/flowcb/nodupe/redis_test.py | 9 +- tests/sarracenia/flowcb/retry_test.py | 122 ++++++----- tests/sarracenia/flowcb/retry_teststeps.py | 151 +++++++++----- tests/sarracenia/redisqueue_test.py | 191 +++++++++--------- 9 files changed, 403 insertions(+), 239 deletions(-) diff --git a/tests/sarracenia/diskqueue_test.py b/tests/sarracenia/diskqueue_test.py index 4ad5c5c16..eb9ed76fa 100644 --- a/tests/sarracenia/diskqueue_test.py +++ b/tests/sarracenia/diskqueue_test.py @@ -2,83 +2,100 @@ import os #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) from sarracenia.diskqueue import DiskQueue +from sarracenia import Message as SR3Message class Options: + def __init__(self): + self.no = 1 + self.retry_ttl = 0 + self.logLevel = "DEBUG" + self.logFormat = "" + self.queueName = "TEST_QUEUE_NAME" + self.component = "sarra" + self.retry_driver = 'disk' + self.redisqueue_serverurl = "redis://Never.Going.To.Resolve:6379/0" + self.config = "foobar.conf" + self.pid_filename = "/tmp/sarracenia/diskqueue_test/pid_filename" + self.housekeeping = float(39) + 
self.batch = 0 def add_option(self, option, type, default = None): if not hasattr(self, option): setattr(self, option, default) - - pass - -BaseOptions = Options() -BaseOptions.retry_ttl = 0 -BaseOptions.logLevel = "DEBUG" -BaseOptions.queueName = "TEST_QUEUE_NAME" -BaseOptions.component = "sarra" -BaseOptions.config = "foobar.conf" -BaseOptions.pid_filename = "/tmp/sarracenia/diskqueue_test/pid_filename" -BaseOptions.housekeeping = float(39) - -message = { - "pubTime": "20180118151049.356378078", - "topic": "v02.post.sent_by_tsource2send", - "headers": { - "atime": "20180118151049.356378078", - "from_cluster": "localhost", - "mode": "644", - "mtime": "20180118151048", - "parts": "1,69,1,0,0", - "source": "tsource", - "sum": "d,c35f14e247931c3185d5dc69c5cd543e", - "to_clusters": "localhost" - }, - "baseUrl": "https://NotARealURL", - "relPath": "ThisIsAPath/To/A/File.txt", - "notice": "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" -} + +def make_message(): + m = SR3Message() + m["pubTime"] = "20180118151049.356378078" + m["topic"] = "v02.post.sent_by_tsource2send" + m["mtime"] = "20180118151048" + m["headers"] = { + "atime": "20180118151049.356378078", + "from_cluster": "localhost", + "mode": "644", + "parts": "1,69,1,0,0", + "source": "tsource", + "sum": "d,c35f14e247931c3185d5dc69c5cd543e", + "to_clusters": "localhost" + } + m["baseUrl"] = "https://NotARealURL" + m["relPath"] = "ThisIsAPath/To/A/File.txt" + m["notice"] = "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" + m["_deleteOnPost"] = set() + return m def test_msgFromJSON(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'work_retry') + message = make_message() + assert message == download_retry.msgFromJSON(jsonpickle.encode(message)) def test_msgToJSON(tmp_path): + BaseOptions = Options() 
BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'work_retry') + message = make_message() + assert jsonpickle.encode(message) + '\n' == download_retry.msgToJSON(message) def test__is_exired__TooSoon(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" BaseOptions.retry_ttl = 100000 download_retry = DiskQueue(BaseOptions, 'work_retry') + message = make_message() + assert download_retry.is_expired(message) == True def test__is_exired__TooLate(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" BaseOptions.retry_ttl = 1 download_retry = DiskQueue(BaseOptions, 'work_retry') import sarracenia + message = make_message() message["pubTime"] = sarracenia.nowstr() assert download_retry.is_expired(message) == False def test___len__(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'work_retry') - # fp = open(download_retry.queue_file, 'a') - # fp_new = open(download_retry.new_path, 'a') - # fp_hk = open(download_retry.housekeeping_path, 'a') - - # fp_new.write(download_retry.msgToJSON(message)) download_retry.msg_count += 1 assert len(download_retry) == 1 @@ -86,9 +103,11 @@ def test___len__(tmp_path): assert len(download_retry) == 2 def test_in_cache(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'work_retry') + message = make_message() download_retry.retry_cache = {} assert download_retry.in_cache(message) == False @@ -97,9 +116,11 @@ def test_in_cache(tmp_path): assert download_retry.in_cache(message) == True def test_needs_requeuing(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'work_retry') + message = 
make_message() download_retry.retry_cache = {} assert download_retry.needs_requeuing(message) == True @@ -110,9 +131,11 @@ def test_needs_requeuing(tmp_path): assert download_retry.needs_requeuing(message) == False def test_put__Single(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_put__Single') + message = make_message() download_retry.put([message]) assert download_retry.msg_count_new == 1 @@ -121,9 +144,11 @@ def test_put__Single(tmp_path): assert open(download_retry.new_path, 'r').read() == line def test_put__Multi(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_put__Multi') + message = make_message() download_retry.put([message, message, message]) assert download_retry.msg_count_new == 3 @@ -134,9 +159,11 @@ def test_put__Multi(tmp_path): assert contents == line + line + line def test_cleanup(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_cleanup') + message = make_message() fp = open(download_retry.queue_file, 'a') fp.write(jsonpickle.encode(message) + '\n') download_retry.msg_count = 1 @@ -150,6 +177,7 @@ def test_cleanup(tmp_path): assert download_retry.msg_count == 0 def test_msg_get_from_file__NoLine(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_msg_get_from_file__NoLine') @@ -159,9 +187,12 @@ def test_msg_get_from_file__NoLine(tmp_path): assert msg == None def test_msg_get_from_file(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_msg_get_from_file') + message = make_message() + fp = open(download_retry.queue_file, 'a') 
fp.write(jsonpickle.encode(message) + '\n') fp.flush() @@ -174,9 +205,12 @@ def test_msg_get_from_file(tmp_path): assert msg == message def test_get__Single(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_get__Single') + message = make_message() + fp = open(download_retry.queue_file, 'a') line = jsonpickle.encode(message) + '\n' fp.write(line) @@ -189,9 +223,12 @@ def test_get__Single(tmp_path): assert gotten == [message] def test_get__Multi(tmp_path): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_get__Multi') + message = make_message() + fp = open(download_retry.queue_file, 'a') line = jsonpickle.encode(message) + '\n' fp.write(line + line) @@ -204,9 +241,12 @@ def test_get__Multi(tmp_path): assert gotten == [message, message] def test_on_housekeeping__FinishRetry(tmp_path, caplog): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_on_housekeeping__FinishRetry') + message = make_message() + download_retry.queue_fp = open(download_retry.queue_file, 'a') line = jsonpickle.encode(message) + '\n' download_retry.queue_fp.write(line + line) @@ -216,14 +256,21 @@ def test_on_housekeeping__FinishRetry(tmp_path, caplog): assert hk_out == None + log_found_notFinished = False + for record in caplog.records: if "have not finished retry list" in record.message: - assert "have not finished retry list" in record.message + log_found_notFinished = True + + assert log_found_notFinished == True def test_on_housekeeping(tmp_path, caplog): + BaseOptions = Options() BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" download_retry = DiskQueue(BaseOptions, 'test_on_housekeeping') + message = make_message() + download_retry.new_fp = open(download_retry.new_path, 'a') line = 
jsonpickle.encode(message) + '\n' download_retry.new_fp.write(line + line) @@ -235,10 +282,16 @@ def test_on_housekeeping(tmp_path, caplog): assert os.path.exists(download_retry.queue_file) == True assert os.path.exists(download_retry.new_path) == False + log_found_HasQueue = log_found_NumMessages = log_found_Elapsed = False + for record in caplog.records: if "has queue" in record.message: - assert "has queue" in record.message + log_found_HasQueue = True if "Number of messages in retry list" in record.message: - assert "Number of messages in retry list" in record.message + log_found_NumMessages = True if "on_housekeeping elapse" in record.message: - assert "on_housekeeping elapse" in record.message \ No newline at end of file + log_found_Elapsed = True + + assert log_found_HasQueue == True + assert log_found_NumMessages == True + assert log_found_Elapsed == True \ No newline at end of file diff --git a/tests/sarracenia/flowcb/nodupe/data_test.py b/tests/sarracenia/flowcb/nodupe/data_test.py index 0173ea5a2..ad60a4db2 100644 --- a/tests/sarracenia/flowcb/nodupe/data_test.py +++ b/tests/sarracenia/flowcb/nodupe/data_test.py @@ -2,8 +2,13 @@ import os, types, copy #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) from sarracenia.flowcb.nodupe.data import Data from sarracenia import Message as SR3Message diff --git a/tests/sarracenia/flowcb/nodupe/disk_test.py b/tests/sarracenia/flowcb/nodupe/disk_test.py index 840531f69..0948bb2cf 100644 --- a/tests/sarracenia/flowcb/nodupe/disk_test.py +++ b/tests/sarracenia/flowcb/nodupe/disk_test.py @@ -2,8 +2,13 @@ import os, types, copy #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint 
+import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) from sarracenia.flowcb.nodupe.disk import NoDupe from sarracenia import Message as SR3Message diff --git a/tests/sarracenia/flowcb/nodupe/name_test.py b/tests/sarracenia/flowcb/nodupe/name_test.py index cccabe278..21dbb13ac 100644 --- a/tests/sarracenia/flowcb/nodupe/name_test.py +++ b/tests/sarracenia/flowcb/nodupe/name_test.py @@ -2,8 +2,13 @@ import os, types, copy #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) from sarracenia.flowcb.nodupe.name import Name from sarracenia import Message as SR3Message diff --git a/tests/sarracenia/flowcb/nodupe/nodupe_test.py b/tests/sarracenia/flowcb/nodupe/nodupe_test.py index e07f5c1a6..1e6320eb4 100644 --- a/tests/sarracenia/flowcb/nodupe/nodupe_test.py +++ b/tests/sarracenia/flowcb/nodupe/nodupe_test.py @@ -5,8 +5,13 @@ import fakeredis #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) from sarracenia.flowcb.nodupe.redis import NoDupe as NoDupe_Redis from sarracenia.flowcb.nodupe.disk import NoDupe as NoDupe_Disk diff --git a/tests/sarracenia/flowcb/nodupe/redis_test.py b/tests/sarracenia/flowcb/nodupe/redis_test.py index 537669329..c25bc0c94 100644 --- a/tests/sarracenia/flowcb/nodupe/redis_test.py +++ 
b/tests/sarracenia/flowcb/nodupe/redis_test.py @@ -5,8 +5,13 @@ import fakeredis, urllib.parse #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) from sarracenia.flowcb.nodupe.redis import NoDupe from sarracenia import Message as SR3Message diff --git a/tests/sarracenia/flowcb/retry_test.py b/tests/sarracenia/flowcb/retry_test.py index 901ac678a..a92c0689a 100644 --- a/tests/sarracenia/flowcb/retry_test.py +++ b/tests/sarracenia/flowcb/retry_test.py @@ -1,29 +1,37 @@ import pytest from unittest.mock import patch -import os, types +import os, types, copy #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) #from sarracenia.flowcb import FlowCB from sarracenia.flowcb.retry import Retry +from sarracenia import Message as SR3Message import fakeredis class Options: - retry_driver = 'disk' - redisqueue_serverurl = '' - no = 1 - retry_ttl = 0 - batch = 8 - logLevel = "DEBUG" - queueName = "TEST_QUEUE_NAME" - component = "sarra" - config = "foobar.conf" - pid_filename = "NotARealPath" - housekeeping = float(0) + def __init__(self): + self.no = 1 + self.retry_ttl = 0 + self.logLevel = "DEBUG" + self.logFormat = "" + self.queueName = "TEST_QUEUE_NAME" + self.component = "sarra" + self.retry_driver = 'disk' + self.redisqueue_serverurl = "redis://Never.Going.To.Resolve:6379/0" + self.config = "foobar.conf" + self.pid_filename = "/tmp/sarracenia/retyqueue_test/pid_filename" + self.housekeeping = float(0) + 
self.batch = 0 def add_option(self, option, type, default = None): if not hasattr(self, option): setattr(self, option, default) @@ -35,23 +43,26 @@ def add_option(self, option, type, default = None): WorkList.failed = [] WorkList.directories_ok = [] -message = { - "pubTime": "20180118151049.356378078", - "topic": "v02.post.sent_by_tsource2send", - "headers": { - "atime": "20180118151049.356378078", - "from_cluster": "localhost", - "mode": "644", - "mtime": "20180118151048", - "parts": "1,69,1,0,0", - "source": "tsource", - "sum": "d,c35f14e247931c3185d5dc69c5cd543e", - "to_clusters": "localhost" - }, - "baseUrl": "https://NotARealURL", - "relPath": "ThisIsAPath/To/A/File.txt", - "notice": "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" -} +def make_message(): + m = SR3Message() + m["pubTime"] = "20180118151049.356378078" + m["topic"] = "v02.post.sent_by_tsource2send" + m["mtime"] = "20180118151048" + m["headers"] = { + "atime": "20180118151049.356378078", + "from_cluster": "localhost", + "mode": "644", + "parts": "1,69,1,0,0", + "source": "tsource", + "sum": "d,c35f14e247931c3185d5dc69c5cd543e", + "to_clusters": "localhost" + } + m["baseUrl"] = "https://NotARealURL" + m["relPath"] = "ThisIsAPath/To/A/File.txt" + m["notice"] = "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" + m["_deleteOnPost"] = set() + return m + @pytest.mark.bug("DiskQueue.py doesn't cleanup properly") @pytest.mark.depends(on=['sarracenia/diskqueue_test.py', 'sarracenia/redisqueue_test.py']) @@ -59,6 +70,7 @@ def test_cleanup(tmp_path): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): BaseOptions_disk = Options() + BaseOptions_disk.retry_driver = 'disk' BaseOptions_disk.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry_disk = Retry(BaseOptions_disk) @@ -69,6 +81,8 @@ def test_cleanup(tmp_path): BaseOptions_redis.queueName = "test_cleanup" retry_redis 
= Retry(BaseOptions_redis) + message = make_message() + retry_disk.download_retry.put([message, message, message]) retry_disk.post_retry.put([message, message, message]) @@ -100,6 +114,8 @@ def test_metricsReport(tmp_path): BaseOptions_redis.queueName = "test_metricsReport" retry_redis = Retry(BaseOptions_redis) + message = make_message() + retry_disk.download_retry.put([message, message, message]) retry_disk.post_retry.put([message, message, message]) metrics_disk = retry_disk.metricsReport() @@ -125,11 +141,13 @@ def test_after_post(tmp_path): BaseOptions_redis.queueName = "test_after_post" retry_redis = Retry(BaseOptions_redis) - after_post_worklist_disk = WorkList + message = make_message() + + after_post_worklist_disk = copy.deepcopy(WorkList) after_post_worklist_disk.failed = [message, message, message] retry_disk.after_post(after_post_worklist_disk) - after_post_worklist_redis = WorkList + after_post_worklist_redis = copy.deepcopy(WorkList) after_post_worklist_redis.failed = [message, message, message] retry_redis.after_post(after_post_worklist_redis) @@ -149,11 +167,13 @@ def test_after_work__WLFailed(tmp_path): BaseOptions_redis.queueName = "test_after_work__WLFailed" retry_redis = Retry(BaseOptions_redis) - after_work_worklist_disk = WorkList + message = make_message() + + after_work_worklist_disk = copy.deepcopy(WorkList) after_work_worklist_disk.failed = [message, message, message] retry_disk.after_work(after_work_worklist_disk) - after_work_worklist_redis = WorkList + after_work_worklist_redis = copy.deepcopy(WorkList) after_work_worklist_redis.failed = [message, message, message] retry_redis.after_work(after_work_worklist_redis) @@ -176,11 +196,13 @@ def test_after_work__SmallQty(tmp_path): BaseOptions_redis.queueName = "test_after_work__SmallQty" retry_redis = Retry(BaseOptions_redis) - after_work_worklist_disk = WorkList + message = make_message() + + after_work_worklist_disk = copy.deepcopy(WorkList) after_work_worklist_disk.ok = [message, message, 
message] retry_disk.after_work(after_work_worklist_disk) - after_work_worklist_redis = WorkList + after_work_worklist_redis = copy.deepcopy(WorkList) after_work_worklist_redis.ok = [message, message, message] retry_redis.after_work(after_work_worklist_redis) @@ -202,20 +224,22 @@ def test_after_work(tmp_path): BaseOptions_redis.queueName = "test_after_work" retry_redis = Retry(BaseOptions_redis) - after_work_worklist_disk = WorkList + message = make_message() + + after_work_worklist_disk = copy.deepcopy(WorkList) after_work_worklist_disk.ok = [message, message, message] retry_disk.post_retry.put([message, message, message]) retry_disk.on_housekeeping() retry_disk.after_work(after_work_worklist_disk) - after_work_worklist_redis = WorkList + after_work_worklist_redis = copy.deepcopy(WorkList) after_work_worklist_redis.ok = [message, message, message] retry_redis.post_retry.put([message, message, message]) retry_redis.on_housekeeping() retry_redis.after_work(after_work_worklist_redis) assert len(retry_disk.download_retry) == len(retry_redis.download_retry) == 0 - assert len(after_work_worklist_disk.ok) == len(after_work_worklist_redis.ok) == 4 + assert len(after_work_worklist_disk.ok) == len(after_work_worklist_redis.ok) == 3 @pytest.mark.depends(on=['sarracenia/diskqueue_test.py', 'sarracenia/redisqueue_test.py']) def test_after_accept__SmallQty(tmp_path): @@ -233,11 +257,13 @@ def test_after_accept__SmallQty(tmp_path): BaseOptions_redis.queueName = "test_after_accept__SmallQty" retry_redis = Retry(BaseOptions_redis) - after_work_worklist_disk = WorkList + message = make_message() + + after_work_worklist_disk = copy.deepcopy(WorkList) after_work_worklist_disk.incoming = [message, message, message] retry_disk.after_accept(after_work_worklist_disk) - after_work_worklist_redis = WorkList + after_work_worklist_redis = copy.deepcopy(WorkList) after_work_worklist_redis.incoming = [message, message, message] retry_redis.after_accept(after_work_worklist_redis) @@ -258,20 
+284,22 @@ def test_after_accept(tmp_path): BaseOptions_redis.queueName = "test_after_accept" retry_redis = Retry(BaseOptions_redis) - after_work_worklist_disk = WorkList + message = make_message() + + after_work_worklist_disk = copy.deepcopy(WorkList) after_work_worklist_disk.incoming = [message, message, message] retry_disk.download_retry.put([message, message, message]) retry_disk.on_housekeeping() retry_disk.after_accept(after_work_worklist_disk) - after_work_worklist_redis = WorkList + after_work_worklist_redis = copy.deepcopy(WorkList) after_work_worklist_redis.incoming = [message, message, message] retry_redis.download_retry.put([message, message, message]) retry_redis.on_housekeeping() retry_redis.after_accept(after_work_worklist_redis) - assert len(retry_disk.download_retry) == len(retry_redis.download_retry) == 0 - assert len(after_work_worklist_disk.incoming) == len(after_work_worklist_redis.incoming) == 4 + assert len(retry_disk.download_retry) == len(retry_redis.download_retry) == 1 + assert len(after_work_worklist_disk.incoming) == len(after_work_worklist_redis.incoming) == 3 @pytest.mark.depends(on=['sarracenia/diskqueue_test.py', 'sarracenia/redisqueue_test.py']) def test_on_housekeeping(tmp_path, caplog): @@ -287,6 +315,8 @@ def test_on_housekeeping(tmp_path, caplog): BaseOptions_redis.queueName = "test_on_housekeeping" retry_redis = Retry(BaseOptions_redis) + message = make_message() + retry_disk.download_retry.put([message, message, message]) retry_disk.post_retry.put([message, message, message]) diff --git a/tests/sarracenia/flowcb/retry_teststeps.py b/tests/sarracenia/flowcb/retry_teststeps.py index 7436689bf..4076d2026 100644 --- a/tests/sarracenia/flowcb/retry_teststeps.py +++ b/tests/sarracenia/flowcb/retry_teststeps.py @@ -1,36 +1,59 @@ + +############################################################################### +# NOTES +# +# This file isn't used, but serves as an example of how one might use +# the pytest-steps package to break down 
comparative tests into +# driver-specific steps +# +# This works well, except it doesn't help us validate that drivers +# do/return the same things, so it was abandoned. It is kept purely as a +# reference to how this *could* be done in other cases +# +# To use it, one would just have to install the `pytest-steps` package, +# and ideally add it to the tests/requirements.txt file so that the +# Unit Test pipeline works properly +############################################################################### + import pytest from pytest_steps import test_steps from unittest.mock import patch -import os, types +import os, types, copy #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) #from sarracenia.flowcb import FlowCB from sarracenia.flowcb.retry import Retry +from sarracenia import Message as SR3Message import fakeredis class Options: - retry_driver = 'disk' - redisqueue_serverurl = '' - no = 1 - retry_ttl = 0 - batch = 8 - logLevel = "DEBUG" - queueName = "TEST_QUEUE_NAME" - component = "sarra" - config = "foobar.conf" - pid_filename = "NotARealPath" - housekeeping = float(0) + def __init__(self): + self.no = 1 + self.retry_ttl = 0 + self.logLevel = "DEBUG" + self.logFormat = "" + self.queueName = "TEST_QUEUE_NAME" + self.component = "sarra" + self.retry_driver = 'disk' + self.redisqueue_serverurl = "redis://Never.Going.To.Resolve:6379/0" + self.config = "foobar.conf" + self.pid_filename = "/tmp/sarracenia/retyqueue_test/pid_filename" + self.housekeeping = float(0) + self.batch = 8 def add_option(self, option, type, default = None): if not hasattr(self, option): setattr(self, option, default) - - WorkList = types.SimpleNamespace() WorkList.ok = [] WorkList.incoming = [] @@ -38,23 
+61,25 @@ def add_option(self, option, type, default = None): WorkList.failed = [] WorkList.directories_ok = [] -message = { - "pubTime": "20180118151049.356378078", - "topic": "v02.post.sent_by_tsource2send", - "headers": { - "atime": "20180118151049.356378078", - "from_cluster": "localhost", - "mode": "644", - "mtime": "20180118151048", - "parts": "1,69,1,0,0", - "source": "tsource", - "sum": "d,c35f14e247931c3185d5dc69c5cd543e", - "to_clusters": "localhost" - }, - "baseUrl": "https://NotARealURL", - "relPath": "ThisIsAPath/To/A/File.txt", - "notice": "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" -} +def make_message(): + m = SR3Message() + m["pubTime"] = "20180118151049.356378078" + m["topic"] = "v02.post.sent_by_tsource2send" + m["mtime"] = "20180118151048" + m["headers"] = { + "atime": "20180118151049.356378078", + "from_cluster": "localhost", + "mode": "644", + "parts": "1,69,1,0,0", + "source": "tsource", + "sum": "d,c35f14e247931c3185d5dc69c5cd543e", + "to_clusters": "localhost" + } + m["baseUrl"] = "https://NotARealURL" + m["relPath"] = "ThisIsAPath/To/A/File.txt" + m["notice"] = "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" + m["_deleteOnPost"] = set() + return m @pytest.mark.bug("DiskQueue.py doesn't cleanup properly") @test_steps('disk', 'redis') @@ -71,6 +96,8 @@ def cleanup__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) + message = make_message() + retry.download_retry.put([message, message, message]) retry.post_retry.put([message, message, message]) @@ -92,6 +119,8 @@ def cleanup__redis(): BaseOptions.queueName = "test_cleanup" retry = Retry(BaseOptions) + message = make_message() + retry.download_retry.put([message, message, message]) retry.post_retry.put([message, message, message]) @@ -120,6 +149,8 @@ def metricsReport__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) 
+ os.sep + "pidfilename.txt" retry = Retry(BaseOptions) + message = make_message() + retry.download_retry.put([message, message, message]) retry.post_retry.put([message, message, message]) @@ -137,6 +168,8 @@ def metricsReport__redis(): BaseOptions.queueName = "test_metricsReport" retry = Retry(BaseOptions) + message = make_message() + retry.download_retry.put([message, message, message]) retry.post_retry.put([message, message, message]) @@ -161,7 +194,9 @@ def after_post__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) - after_post_worklist = WorkList + message = make_message() + + after_post_worklist = copy.deepcopy(WorkList) after_post_worklist.failed = [message, message, message] retry.after_post(after_post_worklist) @@ -177,7 +212,9 @@ def after_post__redis(): BaseOptions.queueName = "test_after_post" retry = Retry(BaseOptions) - after_post_worklist = WorkList + message = make_message() + + after_post_worklist = copy.deepcopy(WorkList) after_post_worklist.failed = [message, message, message] retry.after_post(after_post_worklist) @@ -200,7 +237,9 @@ def after_work__WLFailed__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) - after_work_worklist = WorkList + message = make_message() + + after_work_worklist = copy.deepcopy(WorkList) after_work_worklist.failed = [message, message, message] retry.after_work(after_work_worklist) @@ -217,7 +256,9 @@ def after_work__WLFailed__redis(): BaseOptions.queueName = "test_after_work__WLFailed" retry = Retry(BaseOptions) - after_work_worklist = WorkList + message = make_message() + + after_work_worklist = copy.deepcopy(WorkList) after_work_worklist.failed = [message, message, message] retry.after_work(after_work_worklist) @@ -243,7 +284,9 @@ def after_work__SmallQty__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) - after_work_worklist 
= WorkList + message = make_message() + + after_work_worklist = copy.deepcopy(WorkList) after_work_worklist.ok = [message, message, message] retry.after_work(after_work_worklist) @@ -261,7 +304,9 @@ def after_work__SmallQty__redis(): BaseOptions.queueName = "test_after_work__SmallQty" retry = Retry(BaseOptions) - after_work_worklist = WorkList + message = make_message() + + after_work_worklist = copy.deepcopy(WorkList) after_work_worklist.ok = [message, message, message] retry.after_work(after_work_worklist) @@ -285,7 +330,9 @@ def after_work__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) - after_work_worklist = WorkList + message = make_message() + + after_work_worklist = copy.deepcopy(WorkList) after_work_worklist.ok = [message, message, message] retry.post_retry.put([message, message, message]) retry.on_housekeeping() @@ -304,7 +351,9 @@ def after_work__redis(): BaseOptions.queueName = "test_after_work" retry = Retry(BaseOptions) - after_work_worklist = WorkList + message = make_message() + + after_work_worklist = copy.deepcopy(WorkList) after_work_worklist.ok = [message, message, message] retry.post_retry.put([message, message, message]) retry.on_housekeeping() @@ -331,7 +380,9 @@ def after_accept__SmallQty__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) - after_accept_worklist = WorkList + message = make_message() + + after_accept_worklist = copy.deepcopy(WorkList) after_accept_worklist.incoming = [message, message, message] retry.after_accept(after_accept_worklist) @@ -349,7 +400,9 @@ def after_accept__SmallQty__redis(): BaseOptions.queueName = "test_after_accept__SmallQty" retry = Retry(BaseOptions) - after_accept_worklist = WorkList + message = make_message() + + after_accept_worklist = copy.deepcopy(WorkList) after_accept_worklist.incoming = [message, message, message] retry.after_accept(after_accept_worklist) @@ -374,10 
+427,12 @@ def after_accept__disk(tmp_path): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) + message = make_message() + retry.download_retry.put([message, message, message]) retry.on_housekeeping() - after_accept_worklist = WorkList + after_accept_worklist = copy.deepcopy(WorkList) after_accept_worklist.incoming = [message, message, message] retry.after_accept(after_accept_worklist) @@ -394,7 +449,9 @@ def after_accept__redis(): BaseOptions.queueName = "test_after_accept" retry = Retry(BaseOptions) - after_accept_worklist = WorkList + message = make_message() + + after_accept_worklist = copy.deepcopy(WorkList) after_accept_worklist.incoming = [message, message, message] retry.download_retry.put([message, message, message]) retry.on_housekeeping() @@ -420,6 +477,8 @@ def on_housekeeping__disk(tmp_path, caplog): BaseOptions.pid_filename = str(tmp_path) + os.sep + "pidfilename.txt" retry = Retry(BaseOptions) + message = make_message() + retry.download_retry.put([message, message, message]) retry.post_retry.put([message, message, message]) @@ -443,6 +502,8 @@ def on_housekeeping__redis(caplog): BaseOptions.queueName = "test_on_housekeeping" retry = Retry(BaseOptions) + message = make_message() + #server_test_on_housekeeping = fakeredis.FakeServer() #retry.download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_on_housekeeping) #retry.post_retry.redis = fakeredis.FakeStrictRedis(server=server_test_on_housekeeping) diff --git a/tests/sarracenia/redisqueue_test.py b/tests/sarracenia/redisqueue_test.py index d59aec5b7..b0d95b294 100644 --- a/tests/sarracenia/redisqueue_test.py +++ b/tests/sarracenia/redisqueue_test.py @@ -2,50 +2,62 @@ from unittest.mock import patch #useful for debugging tests -#import pprint -#pretty = pprint.PrettyPrinter(indent=2, width=200).pprint +import pprint +def pretty(*things, **named_things): + for t in things: + pprint.PrettyPrinter(indent=2, width=200).pprint(t) + for k,v in 
named_things.items(): + print(str(k) + ":") + pprint.PrettyPrinter(indent=2, width=200).pprint(v) from sarracenia.redisqueue import RedisQueue +from sarracenia import Message as SR3Message import fakeredis import jsonpickle class Options: + def __init__(self): + self.no = 1 + self.retry_ttl = 0 + self.logLevel = "DEBUG" + self.logFormat = "" + self.queueName = "TEST_QUEUE_NAME" + self.component = "sarra" + self.retry_driver = 'disk' + self.redisqueue_serverurl = "redis://Never.Going.To.Resolve:6379/0" + self.config = "foobar.conf" + self.pid_filename = "/tmp/sarracenia/diskqueue_test/pid_filename" + self.housekeeping = float(39) + self.batch = 0 def add_option(self, option, type, default = None): if not hasattr(self, option): setattr(self, option, default) - pass - -BaseOptions = Options() -BaseOptions.retry_ttl = 0 -BaseOptions.logLevel = "INFO" -BaseOptions.queueName = "TEST_QUEUE_NAME" -BaseOptions.component = "sarra" -BaseOptions.config = "foobar.conf" -BaseOptions.redisqueue_serverurl = "redis://Never.Going.To.Resolve:6379/0" -BaseOptions.housekeeping = float(39) - -message = { - "pubTime": "20180118151049.356378078", - "topic": "v02.post.sent_by_tsource2send", - "headers": { - "atime": "20180118151049.356378078", - "from_cluster": "localhost", - "mode": "644", - "mtime": "20180118151048", - "parts": "1,69,1,0,0", - "source": "tsource", - "sum": "d,c35f14e247931c3185d5dc69c5cd543e", - "to_clusters": "localhost" - }, - "baseUrl": "https://NotARealURL", - "relPath": "ThisIsAPath/To/A/File.txt", - "notice": "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" -} + +def make_message(): + m = SR3Message() + m["pubTime"] = "20180118151049.356378078" + m["topic"] = "v02.post.sent_by_tsource2send" + m["mtime"] = "20180118151048" + m["headers"] = { + "atime": "20180118151049.356378078", + "from_cluster": "localhost", + "mode": "644", + "parts": "1,69,1,0,0", + "source": "tsource", + "sum": 
"d,c35f14e247931c3185d5dc69c5cd543e", + "to_clusters": "localhost" + } + m["baseUrl"] = "https://NotARealURL" + m["relPath"] = "ThisIsAPath/To/A/File.txt" + m["notice"] = "20180118151050.45 ftp://anonymous@localhost:2121 /sent_by_tsource2send/SXAK50_KWAL_181510___58785" + m["_deleteOnPost"] = set() + return m def test___len__(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test___len__') download_retry.redis.lpush(download_retry.key_name, "first") assert len(download_retry) == 1 @@ -56,8 +68,10 @@ def test___len__(): def test__in_cache(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test__in_cache') + message = make_message() download_retry.retry_cache = {} assert download_retry._in_cache(message) == False @@ -66,25 +80,32 @@ def test__in_cache(): def test__is_exired__TooSoon(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() BaseOptions.retry_ttl = 100000 download_retry = RedisQueue(BaseOptions, 'test__is_exired__TooSoon') + message = make_message() + assert download_retry._is_expired(message) == True def test__is_exired__TooLate(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() BaseOptions.retry_ttl = 1 download_retry = RedisQueue(BaseOptions, 'test__is_exired__TooLate') import sarracenia + message = make_message() message["pubTime"] = sarracenia.nowstr() assert download_retry._is_expired(message) == False def test__needs_requeuing(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test__needs_requeuing') + message = make_message() download_retry.retry_cache = {} assert download_retry._needs_requeuing(message) == True @@ -94,21 +115,28 @@ def 
test__needs_requeuing(): def test__msgFromJSON(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test__msgFromJSON') + message = make_message() + assert message == download_retry._msgFromJSON(jsonpickle.encode(message)) def test__msgToJSON(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test__msgToJSON') + message = make_message() + assert jsonpickle.encode(message) == download_retry._msgToJSON(message) def test__lpop(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test__lpop') - #server_test__lpop = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test__lpop) + + message = make_message() download_retry.put([message]) assert download_retry.redis.llen(download_retry.key_name_new) == 1 @@ -116,55 +144,48 @@ def test__lpop(): def test_put__Single(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test_put__Single') - #server_test_put_single = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_put_single) + message = make_message() download_retry.put([message]) assert download_retry.redis.llen(download_retry.key_name_new) == 1 def test_put__Multi(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test_put__Multi') - #server_test_put_multi = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_put_multi) + message = make_message() download_retry.put([message, message, message, message]) assert download_retry.redis.llen(download_retry.key_name_new) == 4 def 
test_cleanup(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): - + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test_cleanup') - #This test fails unless you explicity tell it to use a different server than the rest of the tests - # I don't know why that is, as setting the name above should ensure keyspace uniqueness among all tests - server_test_cleanup = fakeredis.FakeServer() - download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_cleanup) - - download_retry.redis.lpush(download_retry.key_name_lasthk, "data") + download_retry.redis.set(download_retry.key_name_lasthk, "data") download_retry.redis.lpush(download_retry.key_name_new, "data") download_retry.redis.lpush(download_retry.key_name_hk, "data") download_retry.redis.lpush(download_retry.key_name, "data") - #download_retry.redis_lock.acquire() - #download_retry.redis.lpush("lock:" + download_retry.key_name, "data") - - assert len(download_retry.redis.keys()) == 4 + assert len(download_retry.redis.keys(download_retry.key_name + "*")) == 3 + assert len(download_retry.redis.keys(download_retry.key_name_lasthk)) == 1 download_retry.cleanup() - assert len(download_retry.redis.keys()) == 0 + assert len(download_retry.redis.keys(download_retry.key_name + "*")) == 0 + assert len(download_retry.redis.keys(download_retry.key_name_lasthk)) == 0 def test_get__NotLocked_Single(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): - + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test_get__NotLocked_Single') - #server_test_get__NotLocked = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_get__NotLocked) + message = make_message() download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) @@ -175,19 +196,16 @@ def test_get__NotLocked_Single(): def test_get__NotLocked_Multi(): with patch(target="redis.from_url", 
new=fakeredis.FakeStrictRedis.from_url, ): - + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test_get__NotLocked_Multi') - #server_test_get__NotLocked = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_get__NotLocked) + message = make_message() download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) - #with patch(target="redis_lock.Lock.locked", new=lambda foo: False): - gotten = download_retry.get(2) assert len(gotten) == 2 @@ -195,11 +213,10 @@ def test_get__NotLocked_Multi(): def test_get__Locked(): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): - + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test_get__Locked') - #server_test_get__NotLocked = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_get__NotLocked) + message = make_message() download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) @@ -212,11 +229,9 @@ def test_get__Locked(): def test_on_housekeeping__TooSoon(caplog): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() download_retry = RedisQueue(BaseOptions, 'test_on_housekeeping__TooSoon') - #server_test_on_housekeeping__TooSoon = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_on_housekeeping__TooSoon) - download_retry.redis.set(download_retry.key_name_lasthk, download_retry.now) hk_out = download_retry.on_housekeeping() @@ -226,39 +241,13 @@ def test_on_housekeeping__TooSoon(caplog): if "Housekeeping ran less than " in record.message: assert "Housekeeping ran less than " in record.message -# 
@pytest.mark.skip("No need to check if we're locked, per Peter") -# def test_on_housekeeping__Locked(caplog): -# with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): -# download_retry = RedisQueue(BaseOptions, 'test_on_housekeeping__Locked') - -# #server_test_on_housekeeping__Locked = fakeredis.FakeServer() -# #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_on_housekeeping__Locked) - -# #import jsonpickle - -# #download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) - -# #with patch(target="redis_lock.Lock.locked", new=lambda foo: True): - -# download_retry.redis.set(download_retry.key_name_lasthk, download_retry.now - download_retry.o.housekeeping - 100) -# download_retry.redis_lock.acquire() - -# hk_out = download_retry.on_housekeeping() - -# assert hk_out == None - -# import re -# for record in caplog.records: -# if "Another instance has lock on" in record.message: -# assert "Another instance has lock on" in record.message - def test_on_housekeeping__FinishRetry(caplog): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() BaseOptions.queueName = "test_on_housekeeping__FinishRetry" download_retry = RedisQueue(BaseOptions, 'test_on_housekeeping__FinishRetry') - #server_test_on_housekeeping__FinishRetry = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_on_housekeeping__FinishRetry) + message = make_message() download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) download_retry.redis.lpush(download_retry.key_name, jsonpickle.encode(message)) @@ -269,19 +258,21 @@ def test_on_housekeeping__FinishRetry(caplog): assert hk_out == None + log_found_notFinished = False + for record in caplog.records: if "have not finished retry list" in record.message: - assert "have not finished retry list" in record.message + log_found_notFinished = True + + assert log_found_notFinished == True 
def test_on_housekeeping(caplog): with patch(target="redis.from_url", new=fakeredis.FakeStrictRedis.from_url, ): + BaseOptions = Options() BaseOptions.queueName = "test_on_housekeeping" download_retry = RedisQueue(BaseOptions, 'test_on_housekeeping') - #server_test_on_housekeeping = fakeredis.FakeServer() - #download_retry.redis = fakeredis.FakeStrictRedis(server=server_test_on_housekeeping) - - #with patch(target="redis_lock.Lock.locked", new=lambda foo: True): + message = make_message() download_retry.redis.lpush(download_retry.key_name_new, jsonpickle.encode(message)) download_retry.redis.lpush(download_retry.key_name_new, jsonpickle.encode(message)) @@ -294,9 +285,13 @@ def test_on_housekeeping(caplog): assert hk_out == None assert download_retry.redis.exists(download_retry.key_name_hk) == False - import re + log_found_LockReleased = log_found_Elapsed = False + for record in caplog.records: if "released redis_lock" in record.message: - assert "released redis_lock" in record.message + log_found_LockReleased = True if "on_housekeeping elapse" in record.message: - assert "on_housekeeping elapse" in record.message \ No newline at end of file + log_found_Elapsed = True + + assert log_found_LockReleased == True + assert log_found_Elapsed == True \ No newline at end of file From ddbd669dc1f776d1a2f51384acd8cba8b453322a Mon Sep 17 00:00:00 2001 From: Peter Silva Date: Wed, 26 Jul 2023 23:14:15 -0400 Subject: [PATCH 09/10] issue #721 make magic an extra, degrade gracefully when missing. Also added *all* extra to install all extras. 
--- sarracenia/__init__.py | 20 ++++++++++++++------ sarracenia/flow/__init__.py | 9 ++++++--- setup.py | 6 +++++- 3 files changed, 25 insertions(+), 10 deletions(-) diff --git a/sarracenia/__init__.py b/sarracenia/__init__.py index 14e3337dd..f8392678a 100755 --- a/sarracenia/__init__.py +++ b/sarracenia/__init__.py @@ -31,7 +31,6 @@ import datetime import importlib.util import logging -import magic import os import os.path import paramiko @@ -475,11 +474,15 @@ def fromFileData(path, o, lstat=None): if lstat : if os_stat.S_ISREG(lstat.st_mode): m.__computeIdentity(path, o) - try: - t = magic.from_file(path,mime=True) - m['contentType'] = t - except Exception as ex: - logging.info("trying to determine mime-type. Exception details:", exc_info=True) + if extras['filetypes']['present']: + try: + t = magic.from_file(path,mime=True) + m['contentType'] = t + except Exception as ex: + logging.info("trying to determine mime-type. Exception details:", exc_info=True) + #else: + # m['contentType'] = 'application/octet-stream' # https://www.rfc-editor.org/rfc/rfc2046.txt (default when clueless) + # I think setting a bad value is worse than none, so just omitting. elif os_stat.S_ISDIR(lstat.st_mode): m['contentType'] = 'text/directory' # source: https://www.w3.org/2002/12/cal/rfc2425.html elif os_stat.S_ISLNK(lstat.st_mode): @@ -804,6 +807,7 @@ def getContent(msg): amqp - ability to communicate with AMQP (rabbitmq) brokers mqtt - ability to communicate with MQTT brokers + filetypes - ability to ftppoll - ability to poll FTP servers vip - enable vip (Virtual IP) settings to implement singleton processing for high availability support. 
@@ -816,6 +820,7 @@ def getContent(msg): 'ftppoll' : { 'modules_needed': ['dateparser', 'pytz'], 'present': False, 'lament' : 'will not be able to poll with ftp' }, 'humanize' : { 'modules_needed': ['humanize' ], 'present': False, 'lament': 'humans will have to read larger, uglier numbers' }, 'mqtt' : { 'modules_needed': ['paho.mqtt.client'], 'present': False, 'lament': 'will not be able to connect to mqtt brokers' }, + 'filetypes' : { 'modules_needed': ['magic'], 'present': False, 'lament': 'will not be able to set content headers' }, 'vip' : { 'modules_needed': ['netifaces'] , 'present': False, 'lament': 'will not be able to use the vip option for high availability clustering' }, 'watch' : { 'modules_needed': ['watchdog'] , 'present': False, 'lament': 'cannot watch directories' } } @@ -837,6 +842,9 @@ def getContent(msg): # Some sort of graceful fallback, or good messaging for when dependencies are missing. +if extras['filetypes']['present']: + import magic + if extras['mqtt']['present']: import paho.mqtt.client if not hasattr( paho.mqtt.client, 'MQTTv5' ): diff --git a/sarracenia/flow/__init__.py b/sarracenia/flow/__init__.py index f3acbdc76..61f406ca8 100644 --- a/sarracenia/flow/__init__.py +++ b/sarracenia/flow/__init__.py @@ -1,7 +1,6 @@ import copy import importlib import logging -import magic import os import re @@ -58,6 +57,9 @@ 'vip': None } +if sarracenia.extras['filetypes']['present']: + import magic + if sarracenia.extras['vip']['present']: import netifaces @@ -1868,7 +1870,7 @@ def download(self, msg, options) -> bool: os.rename(new_inflight_path, new_file) # older versions don't include the contentType, so patch it here. 
- if 'contentType' not in msg: + if sarracenia.extras['filetypes']['present'] and 'contentType' not in msg: msg['contentType'] = magic.from_file(new_file,mime=True) self.metrics['flow']['transferRxBytes'] += len_written @@ -1957,7 +1959,8 @@ def send(self, msg, options): local_path = '/' + msg['relPath'] # older versions don't include the contentType, so patch it here. - if 'contentType' not in msg and not 'fileOp' in msg: + if sarracenia.extras['filetypes']['present'] and \ + ('contentType' not in msg) and (not 'fileOp' in msg): msg['contentType'] = magic.from_file(local_path,mime=True) local_dir = os.path.dirname(local_path).replace('\\', '/') diff --git a/setup.py b/setup.py index 10908b46c..0ac91eb82 100755 --- a/setup.py +++ b/setup.py @@ -83,13 +83,17 @@ def read(*parts): 'Topic :: System :: Logging', ], install_requires=[ - "appdirs", "humanfriendly", "humanize", "jsonpickle", "python-magic", "paramiko", + "appdirs", "humanfriendly", "humanize", "jsonpickle", "paramiko", "psutil>=5.3.0", "watchdog" ], extras_require = { 'amqp' : [ "amqp" ], + 'filetypes': [ "python-magic" ], 'ftppoll' : ['dateparser' ], 'mqtt': [ 'paho.mqtt>=1.5.1' ], 'vip': [ 'netifaces' ], 'redis': [ 'redis' ] }) + extras_require['all'] = list(itertools.chain.from_iterable(extras_require.values())) + + From 2ce3a332aa7f6d7e163c163a18d1779663931341 Mon Sep 17 00:00:00 2001 From: Peter Silva Date: Wed, 26 Jul 2023 23:36:50 -0400 Subject: [PATCH 10/10] updating documentation for #721, new filetypes extra. documenting recent changes (optional filetypes extra) and recent both languages. 
--- docs/source/Contribution/Development.rst | 14 ++++++++------ docs/source/Tutorials/Install.rst | 6 ++++++ .../fr/Contribution/D\303\251veloppement.rst" | 7 +++++++ docs/source/fr/Tutoriel/Installer.rst | 18 ++++++++++++------ 4 files changed, 33 insertions(+), 12 deletions(-) diff --git a/docs/source/Contribution/Development.rst b/docs/source/Contribution/Development.rst index 7822878b8..e0b47855b 100644 --- a/docs/source/Contribution/Development.rst +++ b/docs/source/Contribution/Development.rst @@ -137,16 +137,18 @@ Local Installation There are many different ways to install python packages on a computer. Different developers will prefer different methods, and all the methods need to be tested prior to each release. +Sarracenia can work with either mqtt or amqp (most mature and stable) message passing libraries. +Install one of those first. in these examples, we use amqp. * **Wheel** when people are running different operating systems (non-ubuntu, non-debian) people will be installing wheels, typically that have been uploaded to pypi.python.org. On the other hand, it is a bit of a pain/noise to upload every development version, so we only upload releases, so testing of wheels is done by building local wheels. Need to build a new wheel every time a change is made. -* **pip install (not -e)** would pull a wheel down from pypi.python.org. Generally not used during development of Sarracenia itself. +* **pip install metpx-sr3[amqp]** would pull a wheel down from pypi.python.org. Generally not used during development of Sarracenia itself. + one could also pull in all possible dependencies with **pip install metpx-sr3[all]** +* **pip install -e .[amqp] ... lets you edit the source code of the installed package, ideal for debugging problems, because it allows live changes to the application without having to go through building and installing a new package. -* **pip install -e** ... 
lets you edit the source code of the installed package, ideal for debugging problems, because it allows live changes to the application without having to go through building and installing a new package. +* **apt install metpx-sr3** install debian package from repositories, similarly to pip install (not -e), normally dev snapshots are not uploaded to repositories, so while this would be the normal way for users of ubuntu servers, it is not available during development of the package itself. Also need **apt install python3-amqp** -* **apt install** install debian package from repositories, similarly to pip install (not -e), normally dev snapshots are not uploaded to repositories, so while this would be the normal way for users of ubuntu servers, it is not available during development of the package itself. - -* **dpkg -i** builds a debian package for local installation. This is how packages are tested prior to upload to repositories. It can also be used to support development (have to run dpkg -i for each package change.) +* **dpkg -i** builds a debian package for local installation. This is how packages are tested prior to upload to repositories. It can also be used to support development (have to run dpkg -i for each package change.) also need **apt install python3-amqp** The sr_insects tests invokes the version of metpx-sarracenia that is installed on the system, and not what is in the development tree. It is necessary to install the package on @@ -998,7 +1000,7 @@ to identify more issues. 
sample run to 100,000 entries:: maximum of the shovels is: 100008 -While it is runnig one can run flow_check.sh at any time:: +While it is running one can run flow_check.sh at any time:: NB retries for sr_subscribe t_f30 0 NB retries for sr_sender 18 diff --git a/docs/source/Tutorials/Install.rst b/docs/source/Tutorials/Install.rst index 48b6136ae..6ddd579dd 100644 --- a/docs/source/Tutorials/Install.rst +++ b/docs/source/Tutorials/Install.rst @@ -51,6 +51,7 @@ On Ubuntu 22.04 and derivatives:: sudo add-apt-repository ppa:ssc-hpc-chp-spc/metpx sudo apt update sudo apt install metpx-sr3 # main python package. + sudo apt install python3-magic # optional support putting file type content-type message headers. sudo apt install metpx-sr3c # optional C client. sudo apt install python3-amqp # optionally support rabbitmq brokers sudo apt install python3-paho-mqtt # optionally support MQTT brokers @@ -139,6 +140,7 @@ For example, on fedora 28 mandatories:: Optional ones:: $ sudo dnf install python3-amqp # optionally support rabbitmq brokers + $ sudo dnf install python3-magic # optionally support content-type header in messages. $ sudo dnf install python3-netifaces # optionally support vip directive for HA. $ sudo dnf install python3-paho-mqtt # optionally support mqtt brokers @@ -174,6 +176,10 @@ one could also add the extras:: $ pip install metpx-sr3[amqp,mqtt,vip] +for all the extras, there is a shortcut:: + + $ pip install metpx-sr3[all] + and to upgrade after the initial installation:: $ pip install metpx-sr3 diff --git "a/docs/source/fr/Contribution/D\303\251veloppement.rst" "b/docs/source/fr/Contribution/D\303\251veloppement.rst" index eb94d68f5..282921eb0 100644 --- "a/docs/source/fr/Contribution/D\303\251veloppement.rst" +++ "b/docs/source/fr/Contribution/D\303\251veloppement.rst" @@ -133,28 +133,35 @@ Installation locale Il existe de nombreuses façons d’installer des paquets python sur un ordinateur. 
Différents développeurs préféreront différentes méthodes, et toutes les méthodes doivent être testées avant chaque version. +Avant d´installer le paquet il faut généralement une librarie pour communiquer avec le courtier +de messages (généralement rabbitmq/AMQP, mais ca peut être MQTT également) * **Wheel** Lorsque les gens utilisent différents systèmes d’exploitation (non-Ubuntu, non-Debian), les gens installent des wheel, généralement qui ont été téléchargées sur pypi.python.org. D’un autre côté, c’est un peu pénible / bruyant de télécharger chaque version de développement, donc nous ne téléchargeons que des versions, donc les tests de wheel se font en construisant des roues locales. Besoin de construire une nouvelle wheel chaque fois qu’un changement est apporté. + *pip install amqp* sera également nécessaire pour le support rabbitmq. * **pip install (pas -e)** tirerait une wheel vers le bas de pypi.python.org. Généralement pas utilisé pendant le développement de Sarracenia lui-même. + *pip install amqp* sera également nécessaire pour le support rabbitmq. * **pip install -e** ... vous permet de modifier le code source du package installé, idéal pour les problèmes de débogage, car il permet des modifications en direct de l’application sans avoir à passer par la construction et l’installation d’un nouveau package. + *pip install amqp* sera également nécessaire pour le support rabbitmq. * **apt install** installer le paquet Debian à partir de dépôts, de la même manière que pip install (pas -e), normalement les instantanés de développement ne sont pas téléchargés vers des dépôts, donc bien que ce soit la manière normale pour les utilisateurs de serveurs Ubuntu, il n’est pas disponible pendant le développement du paquet lui-même. + *apt install python3-amqp* sera également nécessaire pour le support rabbitmq. * **dpkg -i** construit un paquet Debian pour l’installation locale. C’est ainsi que les packages sont testés avant d’être téléchargés vers des référentiels. 
Il peut également être utilisé pour soutenir le développement (il faut exécuter dpkg -i pour chaque changement de paquet). + *apt install python3-amqp* sera également nécessaire pour le support rabbitmq. Le test sr_insects appelle la version de metpx-sarracenia installée sur le système, et non ce qui est dans l’arbre de développement. Il est nécessaire d’installer le paquet sur diff --git a/docs/source/fr/Tutoriel/Installer.rst b/docs/source/fr/Tutoriel/Installer.rst index e0add8af6..4c938538a 100644 --- a/docs/source/fr/Tutoriel/Installer.rst +++ b/docs/source/fr/Tutoriel/Installer.rst @@ -48,12 +48,13 @@ Sur Ubuntu 22.04 et dérivés du même:: sudo add-apt-repository ppa:ssc-hpc-chp-spc/metpx sudo apt update - sudo apt install metpx-sr3 # main python package. - sudo apt install metpx-sr3c # optional C client. - sudo apt install python3-amqp # optionally support rabbitmq brokers - sudo apt install python3-paho-mqtt # optionally support MQTT brokers - sudo apt install python3-netifaces # optionally support the vip directive (HA failover.) - sudo apt install python3-dateparser python3-pytz # optionally support ftp polling. + sudo apt install metpx-sr3 # pacquet principale. + sudo apt install metpx-sr3c # client binaire (en C) . + sudo apt install python3-amqp # support optionnel pour les courtiers AMWP (rabbitmq) + sudo apt install python3-magic # support optionnel pour les entêtes "content-type" dans les messages + sudo apt install python3-paho-mqtt # support optionnel pour les courtiers MQTT + sudo apt install python3-netifaces # support optionnel pour les vip (haut-disponibilité) + sudo apt install python3-dateparser python3-pytz # support optionnel pour les sondages ftp. Si les paquets ne sont pas disponibles, on peut les remplacer en utilisant python install package (pip) Actuellement, seuls les paquets Debian incluent des pages de manuel. 
Les guides sont seulement @@ -127,6 +128,7 @@ Par exemple, sur fedora 28 obligatoirement:: Facultatifs:: $ sudo dnf install python3-amqp # optionally support rabbitmq brokers + $ sudo dnf install python3-magic # optionally support content-type headers in files. $ sudo dnf install python3-netifaces # optionally support vip directive for HA. $ sudo dnf install python3-paho-mqtt # optionally support mqtt brokers @@ -161,6 +163,10 @@ on pourrait aussi ajouter les extras:: $ pip install metpx-sr3[amqp,mqtt,vip] +Si veut avoir tous les extras:: + + $ pip install metpx-sr3[all] + et à mettre à niveau après l’installation initiale:: $ pip install metpx-sr3