diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 54a015a9..e080fe81 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 4.3.1 +current_version = 4.3.2 commit = true tag = true tag_name = {new_version} diff --git a/CONTRIBUTORS.rst b/CONTRIBUTORS.rst index 1f02e449..1cc84055 100644 --- a/CONTRIBUTORS.rst +++ b/CONTRIBUTORS.rst @@ -51,4 +51,4 @@ Contributors * Zoltan Benedek * Øyvind Heddeland Instefjord * Pol Sanlorenzo - +* David Colangelo diff --git a/docs/transport.rst b/docs/transport.rst index 7479d34d..942ae8d0 100644 --- a/docs/transport.rst +++ b/docs/transport.rst @@ -141,6 +141,31 @@ Another option is to use the InMemoryCache backend. It internally uses a global dict to store urls with the corresponding content. +If you run your servers in a pool you may wish to share your WSDL cache across multiple servers if they are making +similar calls. To do this you can offload the cache to a shared redis instance by setting a redis cache as follows: + +.. 
class RedisCache(Base):
    """Cache contents in a shared Redis database.

    Useful when zeep runs on a pool of servers that should share one
    WSDL/XSD cache instead of each keeping a private on-disk or
    in-memory copy.

    :param redis_host: hostname of the Redis server
    :param password: password used to authenticate against Redis
    :param port: Redis TCP port (default 6379)
    :param timeout: number of seconds after which a cached entry is
        considered stale (default 3600). Also used as the server-side
        TTL so Redis evicts entries that are no longer valid.
    :param health_check_interval: seconds between connection health
        checks performed by the Redis client
    :param socket_timeout: socket timeout in seconds for Redis commands
    :param retry_on_timeout: whether the client retries a command once
        after a timeout
    :param single_connection_client: use a single (non-pooled) Redis
        connection
    """

    def __init__(
        self,
        redis_host,
        password,
        port=6379,
        timeout=3600,
        health_check_interval=10,
        socket_timeout=5,
        retry_on_timeout=True,
        single_connection_client=True,
    ):
        self._timeout = timeout
        self._redis_host = redis_host

        self._redis_client = redis.StrictRedis(
            host=redis_host,
            port=port,
            password=password,
            health_check_interval=health_check_interval,
            socket_timeout=socket_timeout,
            retry_on_timeout=retry_on_timeout,
            single_connection_client=single_connection_client,
        )

    def add(self, url, content):
        """Store *content* (bytes) for *url*, replacing any previous entry.

        Failures are logged and swallowed: the cache is best-effort and
        must never break the actual SOAP call.
        """
        logger.debug("Caching contents of %s", url)
        try:
            # Record the write time so get() can apply self._timeout on read.
            data = json.dumps(
                {
                    "time": datetime.datetime.now(
                        datetime.timezone.utc
                    ).isoformat(),
                    "value": base64.b64encode(content).decode("utf-8"),
                }
            )
            # SET atomically overwrites an existing key, so no separate
            # DELETE round-trip is needed.  `ex` additionally sets a
            # server-side TTL so stale entries do not pile up in Redis
            # forever (expiry was previously only checked client-side).
            self._redis_client.set(url, value=data, ex=self._timeout or None)
        except Exception:
            logger.debug("Could not cache contents of %s", url, exc_info=True)

    def get(self, url):
        """Return the cached bytes for *url*, or ``None`` on a miss.

        A malformed, unreadable, or expired entry is treated as a miss.
        """
        try:
            raw = self._redis_client.get(url)
            if raw is None:
                logger.debug("Cache MISS for %s", url)
                return None

            entry = json.loads(raw)
            written_at = datetime.datetime.fromisoformat(entry["time"])
            encoded = entry.get("value")
        except Exception:
            # Anything we cannot read or decode is simply a cache miss.
            logger.debug(
                "Could not extract from cache contents of %s", url, exc_info=True
            )
            return None

        if encoded is not None and not _is_expired(written_at, self._timeout):
            logger.debug("Cache HIT for %s", url)
            return base64.b64decode(encoded)

        logger.debug("Cache MISS for %s", url)
        return None