diff --git a/CHANGES.rst b/CHANGES.rst
index 1e28c2c9..1d16ce08 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,6 +1,11 @@
 Changelog
 =========
 
+Unreleased Changes
+------------------
+
+* Fix ``Units set to "None"`` error when retrieving RDS "Total storage for all DB instances" quota from Service Quotas. AWS changed the quota unit from "Gigabytes" to "None" without announcement. We now allow both unit types (similar to the fix for ELB in Issue #503).
+
 .. _changelog.12_0_0:
 
 12.0.0 (2021-08-04)
diff --git a/awslimitchecker/services/rds.py b/awslimitchecker/services/rds.py
index 64e69877..7b15eadc 100644
--- a/awslimitchecker/services/rds.py
+++ b/awslimitchecker/services/rds.py
@@ -46,6 +46,24 @@ logger = logging.getLogger(__name__)
 
 
+def allow_gigabytes_or_none_units(value, in_unit, out_unit):
+    """
+    This is a unit converter for Service Quotas; see
+    :py:meth:`.ServiceQuotasClient.get_quota_value` for details.
+
+    This is a work-around for AWS changing the quota unit for "Total storage
+    for all DB instances" from "Gigabytes" to "None" without announcement.
+    This converter allows both options and treats them identically.
+    """
+    if in_unit not in ['None', 'Gigabytes'] or out_unit != 'Gigabytes':
+        logger.error(
+            'ERROR: cannot convert Service Quotas RDS storage limit value from '
+            'units of "%s" to units of "%s"', in_unit, out_unit
+        )
+        return None
+    return value
+
+
 class _RDSService(_AwsService):
 
     service_name = 'RDS'
 
@@ -172,7 +190,8 @@ def get_limits(self):
             self.critical_threshold,
             limit_type='AWS::RDS::DBInstance',
             quotas_name='Total storage for all DB instances',
-            quotas_unit='Gigabytes'
+            quotas_unit='Gigabytes',
+            quotas_unit_converter=allow_gigabytes_or_none_units
         )
         limits['DB snapshots per user'] = AwsLimit(
             'DB snapshots per user',
diff --git a/awslimitchecker/tests/services/test_rds.py b/awslimitchecker/tests/services/test_rds.py
index 763b5e1d..7cb04704 100644
--- a/awslimitchecker/tests/services/test_rds.py
+++ b/awslimitchecker/tests/services/test_rds.py
@@ -39,7 +39,7 @@ import sys
 
 from awslimitchecker.tests.services import result_fixtures
-from awslimitchecker.services.rds import _RDSService
+from awslimitchecker.services.rds import _RDSService, allow_gigabytes_or_none_units
 
 # https://code.google.com/p/mock/issues/detail?id=249
 # py>=3.4 should use unittest.mock not the mock package on pypi
 
@@ -52,6 +52,18 @@ from unittest.mock import patch, call, Mock, DEFAULT
 
 
+class TestAllowGigabytesOrNoneUnits:
+
+    def test_none(self):
+        assert allow_gigabytes_or_none_units(100, 'None', 'Gigabytes') == 100
+
+    def test_gigabytes(self):
+        assert allow_gigabytes_or_none_units(100, 'Gigabytes', 'Gigabytes') == 100
+
+    def test_other(self):
+        assert allow_gigabytes_or_none_units(100, 'Other', 'Gigabytes') is None
+
+
 class Test_RDSService(object):
 
     pb = 'awslimitchecker.services.rds._RDSService'  # patch base path
 
@@ -96,6 +108,10 @@ def test_get_limits(self):
             assert limit.service == cls
             assert limit.def_warning_threshold == 21
             assert limit.def_critical_threshold == 43
+            if name == 'Storage quota (GB)':
+                assert limit.quotas_unit_converter == allow_gigabytes_or_none_units
+            else:
+                assert limit.quotas_unit_converter is None
 
     def test_get_limits_again(self):
         """test that existing limits dict is returned on subsequent calls"""