diff --git a/Dockerfile.test b/Dockerfile.test new file mode 100644 index 00000000..68159127 --- /dev/null +++ b/Dockerfile.test @@ -0,0 +1,17 @@ +ARG PYTHON_VERSION=3.12 +FROM public.ecr.aws/docker/library/python:${PYTHON_VERSION}-slim + +RUN apt-get update && apt-get install -y \ + gcc \ + g++ \ + make \ + git \ + pkg-config \ + default-libmysqlclient-dev \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +RUN pip install --no-cache-dir --upgrade pip + +CMD ["echo", "Container ready..."] \ No newline at end of file diff --git a/README.md b/README.md index 20e08ff5..fc71e547 100644 --- a/README.md +++ b/README.md @@ -55,9 +55,29 @@ You can find more documentation covering supported components and minimum versio Bug reports and pull requests are welcome on GitHub at https://github.com/instana/python-sensor. -## More +## Run Tests with Docker +Docker compose file currently supports up to Python version 3.13. +

Run tests for all Python versions: +- `docker compose --profile infra-general --profile tests-general up --build --abort-on-container-exit | grep python-sensor-test` + +To run tests for a specific Python version using Docker, 3.12 for example: +- `docker compose --profile infra-general --profile py312 up --build --abort-on-container-exit | grep python-sensor-test` + +Cassandra Tests: +- `docker compose --profile infra-cassandra --profile tests-cassandra up --build --abort-on-container-exit | grep python-sensor-test` -Want to instrument other languages? See our [Node.js], [Go], [Ruby] instrumentation or many other [supported technologies]. 
+Kafka Tests: +- `docker compose --profile infra-kafka --profile tests-kafka up --build --abort-on-container-exit | grep python-sensor-test` + +Gevent Tests: +- `docker compose --profile tests-gevent up --build --abort-on-container-exit | grep python-sensor-test` + +AWS Tests: +- `docker compose --profile tests-aws up --build --abort-on-container-exit | grep python-sensor-test` + +## More +Want to instrument other languages? See our [Node.js], [Go], [Ruby] instrumentation or many other [supported technologies]. [Instana]: https://www.instana.com/ "IBM Instana Observability" diff --git a/docker-compose.yml b/docker-compose.yml index 299806a5..6e0d4262 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,27 +1,65 @@ +x-common-env: &common-env + PYTHONUNBUFFERED: "1" + REDIS_HOST: redis + MYSQL_HOST: mariadb + MONGO_HOST: mongodb + POSTGRES_HOST: postgres + RABBITMQ_HOST: rabbitmq + PUBSUB_EMULATOR_HOST: pubsub:8681 + +x-general-deps: &general-deps + redis: { condition: service_healthy } + mariadb: { condition: service_healthy } + mongodb: { condition: service_healthy } + postgres: { condition: service_healthy } + rabbitmq: { condition: service_healthy } + pubsub: { condition: service_started } + +x-cassandra-deps: &cassandra-deps + cassandra: { condition: service_healthy } + +x-kafka-deps: &kafka-deps + kafka: { condition: service_healthy } + +x-test-template: &test-template + build: + context: . 
+ dockerfile: Dockerfile.test + environment: + <<: *common-env + networks: + - default + +x-standard-test-command: &standard-test-command > + /bin/bash -c " + pip install --upgrade pip setuptools wheel && + pip install wrapt --upgrade --force-reinstall && + pip install -r requirements.txt && + pip install -r tests/requirements.txt && + + coverage run --source=instana -m pytest -v --junitxml=test-results/junit.xml tests && + coverage report -m + " + services: + redis: image: public.ecr.aws/docker/library/redis + profiles: ["infra-general", "all-infra"] volumes: - ./tests/conf/redis.conf:/usr/local/etc/redis/redis.conf:Z command: redis-server /usr/local/etc/redis/redis.conf - ports: - - "0.0.0.0:6379:6379" - - cassandra: - image: public.ecr.aws/docker/library/cassandra - ports: - - 9042:9042 - - couchbase: - image: public.ecr.aws/docker/library/couchbase:community - ports: - - 8091-8094:8091-8094 - - 11210:11210 + ports: ["6379:6379"] + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 3s + retries: 10 mariadb: image: public.ecr.aws/docker/library/mariadb - ports: - - 3306:3306 + profiles: ["infra-general", "all-infra"] + ports: ["3306:3306"] environment: MYSQL_DATABASE: 'instana_test_db' MYSQL_USER: 'root' @@ -29,55 +67,86 @@ services: MYSQL_ROOT_HOST: '%' volumes: - ./tests/config/database/mysql/conf.d/mysql.cnf:/etc/mysql/conf.d/mysql.cnf:Z + healthcheck: + test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] + interval: 5s + timeout: 3s + retries: 20 mongodb: image: public.ecr.aws/docker/library/mongo - ports: - - '27017:27017' + profiles: ["infra-general", "all-infra"] + ports: ["27017:27017"] + healthcheck: + test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"] + interval: 5s + timeout: 3s + retries: 10 postgres: image: public.ecr.aws/docker/library/postgres - ports: - - 5432:5432 + profiles: ["infra-general", "all-infra"] + ports: ["5432:5432"] environment: POSTGRES_USER: root POSTGRES_PASSWORD: passw0rd 
POSTGRES_DB: instana_test_db + healthcheck: + test: ["CMD-SHELL", "pg_isready -U root -d instana_test_db"] + interval: 5s + timeout: 3s + retries: 10 rabbitmq: image: public.ecr.aws/docker/library/rabbitmq + profiles: ["infra-general", "all-infra"] environment: - RABBITMQ_NODENAME=rabbit@localhost - ports: - - 5671:5671 - - 5672:5672 + ports: ["5671:5671", "5672:5672"] + healthcheck: + test: ["CMD", "rabbitmq-diagnostics", "check_port_connectivity"] + interval: 5s + timeout: 3s + retries: 20 + start_period: 30s pubsub: image: quay.io/thekevjames/gcloud-pubsub-emulator:latest + profiles: ["infra-general", "all-infra"] environment: - PUBSUB_EMULATOR_HOST=0.0.0.0:8681 - PUBSUB_PROJECT1=test-project,test-topic - ports: - - "8681:8681" - - "8682:8682" + ports: ["8681:8681", "8682:8682"] + + cassandra: + image: public.ecr.aws/docker/library/cassandra + profiles: ["infra-cassandra", "all-infra"] + ports: ["9042:9042"] + environment: + MAX_HEAP_SIZE: 2048m + HEAP_NEWSIZE: 512m + healthcheck: + test: ["CMD-SHELL", "cqlsh -e 'describe cluster'"] + interval: 10s + timeout: 5s + retries: 30 - # Sidecar container for Kafka zookeeper: image: public.ecr.aws/ubuntu/zookeeper:3.1-22.04_edge + profiles: ["infra-kafka", "all-infra"] ports: ["2181:2181"] environment: [ "TZ=UTC" ] kafka: image: public.ecr.aws/ubuntu/kafka:3.1-22.04_edge + profiles: ["infra-kafka", "all-infra"] depends_on: [zookeeper] - ports: - - "9094:9094" - - "9093:9093" + ports: ["9094:9094", "9093:9093"] environment: - TZ=UTC - ZOOKEEPER_HOST=zookeeper - ZOOKEEPER_PORT=2181 - command: + command: - /opt/kafka/config/server.properties - --override - listeners=INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9094 @@ -97,3 +166,201 @@ services: - transaction.state.log.min.isr=1 - --override - auto.create.topics.enable=true + healthcheck: + test: ["CMD-SHELL", "kafka-broker-api-versions.sh --bootstrap-server localhost:9093 || exit 1"] + interval: 10s + timeout: 5s + retries: 20 + + # 
================================================================ + # GENERAL TEST MATRIX + # ================================================================ + + test-py39: + <<: *test-template + image: python-sensor-test:3.9 + container_name: python-sensor-test-py39 + profiles: ["tests-general", "py39"] + build: + context: . + dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.9" + depends_on: *general-deps + volumes: [".:/app", "test-results-py39:/app/test-results"] + command: *standard-test-command + + test-py310: + <<: *test-template + image: python-sensor-test:3.10 + container_name: python-sensor-test-py310 + profiles: ["tests-general", "py310"] + build: + context: . + dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.10" + depends_on: *general-deps + volumes: [".:/app", "test-results-py310:/app/test-results"] + command: *standard-test-command + + test-py311: + <<: *test-template + image: python-sensor-test:3.11 + container_name: python-sensor-test-py311 + profiles: ["tests-general", "py311"] + build: + context: . + dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.11" + depends_on: *general-deps + volumes: [".:/app", "test-results-py311:/app/test-results"] + command: *standard-test-command + + test-py312: + <<: *test-template + image: python-sensor-test:3.12 + container_name: python-sensor-test-py312 + profiles: ["tests-general", "py312"] + build: + context: . + dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.12" + depends_on: *general-deps + volumes: [".:/app", "test-results-py312:/app/test-results"] + command: *standard-test-command + + test-py313: + <<: *test-template + image: python-sensor-test:3.13 + container_name: python-sensor-test-py313 + profiles: ["tests-general", "py313"] + build: + context: . 
+ dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.13" + depends_on: *general-deps + volumes: [".:/app", "test-results-py313:/app/test-results"] + command: *standard-test-command + + # ================================================================ + # SPECIALIZED TESTS + # ================================================================ + + test-cassandra: + <<: *test-template + image: python-sensor-test:3.9 + container_name: python-sensor-test-cassandra + profiles: ["tests-cassandra"] + build: + context: . + dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.9" + depends_on: *cassandra-deps + volumes: [".:/app", "test-results-cassandra:/app/test-results"] + environment: + <<: *common-env + CASSANDRA_HOST: cassandra + CASSANDRA_TEST: "true" + command: > + /bin/bash -c " + pip install --upgrade pip setuptools wheel && + pip install wrapt --upgrade --force-reinstall && + pip install -r requirements.txt && + pip install -r tests/requirements-cassandra.txt && + + coverage run --source=instana -m pytest -v --junitxml=test-results/junit.xml tests/clients/test_cassandra-driver.py && + coverage report -m + " + + test-kafka: + <<: *test-template + image: python-sensor-test:3.13 + container_name: python-sensor-test-kafka + profiles: ["tests-kafka"] + build: + context: . 
+ dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.13" + depends_on: *kafka-deps + volumes: [".:/app", "test-results-kafka:/app/test-results"] + environment: + <<: *common-env + KAFKA_HOST: kafka + KAFKA_PORT: "9093" + KAFKA_TEST: "true" + command: > + /bin/bash -c " + pip install --upgrade pip setuptools wheel && + pip install wrapt --upgrade --force-reinstall && + pip install -r requirements.txt && + pip install -r tests/requirements-kafka.txt && + + coverage run --source=instana -m pytest -v --junitxml=test-results/junit.xml tests/clients/kafka/test*.py && + coverage report -m + " + + test-gevent: + <<: *test-template + image: python-sensor-test:3.9 + container_name: python-sensor-test-gevent + profiles: ["tests-gevent"] + build: + context: . + dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.9" + volumes: [".:/app", "test-results-gevent:/app/test-results"] + environment: + <<: *common-env + GEVENT_TEST: "true" + command: > + /bin/bash -c " + pip install --upgrade pip setuptools wheel && + pip install wrapt --upgrade --force-reinstall && + pip install -r requirements.txt && + pip install -r tests/requirements-gevent-starlette.txt && + + coverage run --source=instana -m pytest -v --junitxml=test-results/junit.xml tests/frameworks/test_gevent.py && + coverage report -m + " + + test-aws: + <<: *test-template + image: python-sensor-test:3.12 + container_name: python-sensor-test-aws + profiles: ["tests-aws"] + build: + context: . 
+ dockerfile: Dockerfile.test + args: + PYTHON_VERSION: "3.12" + volumes: [".:/app", "test-results-aws:/app/test-results"] + # AWS test uses common env, no extra vars needed, so we rely on template + command: > + /bin/bash -c " + pip install --upgrade pip setuptools wheel && + pip install wrapt --upgrade --force-reinstall && + pip install -r requirements.txt && + pip install -r tests/requirements-aws.txt && + + coverage run --source=instana -m pytest -v --junitxml=test-results/junit.xml tests_aws && + coverage report -m + " + +volumes: + test-results-py39: + test-results-py310: + test-results-py311: + test-results-py312: + test-results-py313: + test-results-general: + test-results-cassandra: + test-results-kafka: + test-results-gevent: + test-results-aws: \ No newline at end of file diff --git a/src/instana/instrumentation/kafka/confluent_kafka_python.py b/src/instana/instrumentation/kafka/confluent_kafka_python.py index d406f7c1..f922140d 100644 --- a/src/instana/instrumentation/kafka/confluent_kafka_python.py +++ b/src/instana/instrumentation/kafka/confluent_kafka_python.py @@ -17,8 +17,12 @@ from instana.span.span import InstanaSpan from instana.util.traceutils import get_tracer_tuple, tracing_is_off - consumer_token = None - consumer_span = contextvars.ContextVar("confluent_kafka_consumer_span") + consumer_token = contextvars.ContextVar( + "confluent_kafka_consumer_token", default=None + ) + consumer_span = contextvars.ContextVar( + "confluent_kafka_consumer_span", default=None + ) # As confluent_kafka is a wrapper around the C-developed librdkafka # (provided automatically via binary wheels), we have to create new classes @@ -178,24 +182,23 @@ def create_span( ) # pragma: no cover def save_consumer_span_into_context(span: "InstanaSpan") -> None: - global consumer_token ctx = trace.set_span_in_context(span) - consumer_token = context.attach(ctx) + token = context.attach(ctx) + consumer_token.set(token) consumer_span.set(span) def close_consumer_span(span: 
"InstanaSpan") -> None: - global consumer_token if span.is_recording(): span.end() consumer_span.set(None) - if consumer_token is not None: - context.detach(consumer_token) - consumer_token = None + token = consumer_token.get(None) + if token is not None: + context.detach(token) + consumer_token.set(None) def clear_context() -> None: - global consumer_token context.attach(trace.set_span_in_context(None)) - consumer_token = None + consumer_token.set(None) consumer_span.set(None) def trace_kafka_consume( @@ -253,6 +256,10 @@ def trace_kafka_poll( res = wrapped(*args, **kwargs) if res: create_span("poll", res.topic(), res.headers()) + else: + span = consumer_span.get(None) + if span is not None: + close_consumer_span(span) return res except Exception as exc: exception = exc diff --git a/tests/clients/kafka/test_confluent_kafka.py b/tests/clients/kafka/test_confluent_kafka.py index bc1d85b7..b8913649 100644 --- a/tests/clients/kafka/test_confluent_kafka.py +++ b/tests/clients/kafka/test_confluent_kafka.py @@ -709,19 +709,19 @@ def test_save_consumer_span_into_context(self, span: "InstanaSpan") -> None: """Test save_consumer_span_into_context function.""" # Verify initial state assert consumer_span.get(None) is None - assert confluent_kafka_python.consumer_token is None + assert confluent_kafka_python.consumer_token.get(None) is None # Save span into context save_consumer_span_into_context(span) # Verify token is stored - assert confluent_kafka_python.consumer_token is not None + assert confluent_kafka_python.consumer_token.get(None) is not None def test_close_consumer_span_recording_span(self, span: "InstanaSpan") -> None: """Test close_consumer_span with a recording span.""" # Save span into context first save_consumer_span_into_context(span) - assert confluent_kafka_python.consumer_token is not None + assert confluent_kafka_python.consumer_token.get(None) is not None # Verify span is recording assert span.is_recording() @@ -732,7 +732,7 @@ def 
test_close_consumer_span_recording_span(self, span: "InstanaSpan") -> None: # Verify span was ended and context cleared assert not span.is_recording() assert consumer_span.get(None) is None - assert confluent_kafka_python.consumer_token is None + assert confluent_kafka_python.consumer_token.get(None) is None def test_clear_context(self, span: "InstanaSpan") -> None: """Test clear_context function.""" @@ -741,14 +741,14 @@ def test_clear_context(self, span: "InstanaSpan") -> None: # Verify context has data assert consumer_span.get(None) == span - assert confluent_kafka_python.consumer_token is not None + assert confluent_kafka_python.consumer_token.get(None) is not None # Clear context clear_context() # Verify all context is cleared assert consumer_span.get(None) is None - assert confluent_kafka_python.consumer_token is None + assert confluent_kafka_python.consumer_token.get(None) is None def test_trace_kafka_close_exception_handling(self, span: "InstanaSpan") -> None: """Test trace_kafka_close handles exceptions and still cleans up spans.""" @@ -757,7 +757,7 @@ def test_trace_kafka_close_exception_handling(self, span: "InstanaSpan") -> None # Verify span is in context assert consumer_span.get(None) == span - assert confluent_kafka_python.consumer_token is not None + assert confluent_kafka_python.consumer_token.get(None) is not None # Mock a wrapped function that raises an exception mock_wrapped = Mock(side_effect=Exception("Close operation failed")) @@ -772,7 +772,7 @@ def test_trace_kafka_close_exception_handling(self, span: "InstanaSpan") -> None # Verify that despite the exception, the span was cleaned up assert consumer_span.get(None) is None - assert confluent_kafka_python.consumer_token is None + assert confluent_kafka_python.consumer_token.get(None) is None # Verify span was ended assert not span.is_recording() diff --git a/tests/clients/test_aio_pika.py b/tests/clients/test_aio_pika.py index 6d2102c6..8b924e0f 100644 --- a/tests/clients/test_aio_pika.py +++ 
b/tests/clients/test_aio_pika.py @@ -6,6 +6,7 @@ from aio_pika import Message, connect, connect_robust from instana.singletons import agent, get_tracer +from tests.helpers import testenv if TYPE_CHECKING: from instana.span.readable_span import ReadableSpan @@ -33,7 +34,9 @@ def _resource(self) -> Generator[None, None, None]: async def publish_message(self, params_combination: str = "both_args") -> None: # Perform connection - connection = await connect() + connection = await connect( + host=testenv["rabbitmq_host"], port=testenv["rabbitmq_port"] + ) async with connection: # Creating a channel @@ -68,14 +71,18 @@ async def publish_message(self, params_combination: str = "both_args") -> None: await exchange.publish(*args, **kwargs) async def delete_queue(self) -> None: - connection = await connect() + connection = await connect( + host=testenv["rabbitmq_host"], port=testenv["rabbitmq_port"] + ) async with connection: channel = await connection.channel() await channel.queue_delete(self.queue_name) async def consume_message(self, connect_method) -> None: - connection = await connect_method() + connection = await connect_method( + host=testenv["rabbitmq_host"], port=testenv["rabbitmq_port"] + ) async with connection: # Creating channel @@ -91,7 +98,9 @@ async def consume_message(self, connect_method) -> None: break async def consume_with_exception(self, connect_method) -> None: - connection = await connect_method() + connection = await connect_method( + host=testenv["rabbitmq_host"], port=testenv["rabbitmq_port"] + ) async def on_message(msg): raise RuntimeError("Simulated Exception") diff --git a/tests/clients/test_aioamqp.py b/tests/clients/test_aioamqp.py index 960190b0..c6e75089 100644 --- a/tests/clients/test_aioamqp.py +++ b/tests/clients/test_aioamqp.py @@ -12,9 +12,6 @@ from aioamqp.properties import Properties from aioamqp.envelope import Envelope -testenv["rabbitmq_host"] = "127.0.0.1" -testenv["rabbitmq_port"] = 5672 - class TestAioamqp: 
@pytest.fixture(autouse=True) @@ -25,19 +22,39 @@ def _resource(self) -> Generator[None, None, None]: self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) + self.transport = None + self.protocol = None yield - self.loop.run_until_complete(self.delete_queue()) - if self.loop.is_running(): - self.loop.close() + # Cleanup + try: + self.loop.run_until_complete(self.delete_queue()) + # Cancel all pending tasks + pending = asyncio.all_tasks(self.loop) + for task in pending: + task.cancel() + # Wait for all tasks to be cancelled + if pending: + self.loop.run_until_complete( + asyncio.gather(*pending, return_exceptions=True) + ) + except Exception: + pass + finally: + if not self.loop.is_closed(): + self.loop.close() async def delete_queue(self) -> None: - _, protocol = await aioamqp.connect( - testenv["rabbitmq_host"], - testenv["rabbitmq_port"], - ) - channel = await protocol.channel() - await channel.queue_delete("message_queue") - await asyncio.sleep(1) + try: + transport, protocol = await aioamqp.connect( + testenv["rabbitmq_host"], + testenv["rabbitmq_port"], + ) + channel = await protocol.channel() + await channel.queue_delete("message_queue") + await protocol.close() + transport.close() + except Exception: + pass # Queue might not exist or connection might fail during cleanup async def publish_message(self) -> None: transport, protocol = await aioamqp.connect( @@ -66,13 +83,20 @@ async def callback( with self.tracer.start_as_current_span("callback-span"): await channel.basic_client_ack(delivery_tag=envelope.delivery_tag) - _, protocol = await aioamqp.connect( + self.transport, self.protocol = await aioamqp.connect( testenv["rabbitmq_host"], testenv["rabbitmq_port"] ) - channel = await protocol.channel() + channel = await self.protocol.channel() await channel.queue_declare(queue_name="message_queue") await channel.basic_consume(callback, queue_name="message_queue", no_ack=False) + # Wait for the message to be consumed + await asyncio.sleep(0.5) + + # 
Close the consumer connection + await self.protocol.close() + self.transport.close() + def test_basic_publish(self) -> None: with self.tracer.start_as_current_span("test-span"): self.loop.run_until_complete(self.publish_message()) @@ -86,7 +110,10 @@ def test_basic_publish(self) -> None: assert publisher_span.n == "amqp" assert publisher_span.data["amqp"]["command"] == "publish" assert publisher_span.data["amqp"]["routingkey"] == "message_queue" - assert publisher_span.data["amqp"]["connection"] == "127.0.0.1:5672" + assert publisher_span.data["amqp"]["connection"] + assert ( + str(testenv["rabbitmq_port"]) in publisher_span.data["amqp"]["connection"] + ) assert publisher_span.p == test_span.s @@ -110,7 +137,10 @@ def test_basic_consumer(self) -> None: assert publisher_span.n == "amqp" assert publisher_span.data["amqp"]["command"] == "publish" assert publisher_span.data["amqp"]["routingkey"] == "message_queue" - assert publisher_span.data["amqp"]["connection"] == "127.0.0.1:5672" + assert publisher_span.data["amqp"]["connection"] + assert ( + str(testenv["rabbitmq_port"]) in publisher_span.data["amqp"]["connection"] + ) assert publisher_span.p == test_span.s assert callback_span.n == "sdk" @@ -121,7 +151,8 @@ def test_basic_consumer(self) -> None: assert consumer_span.n == "amqp" assert consumer_span.data["amqp"]["command"] == "consume" assert consumer_span.data["amqp"]["routingkey"] == "message_queue" - assert consumer_span.data["amqp"]["connection"] == "127.0.0.1:5672" + assert consumer_span.data["amqp"]["connection"] + assert str(testenv["rabbitmq_port"]) in consumer_span.data["amqp"]["connection"] assert ( consumer_span.data["amqp"]["connection"] == publisher_span.data["amqp"]["connection"] diff --git a/tests/clients/test_google-cloud-pubsub.py b/tests/clients/test_google-cloud-pubsub.py index 98168bda..173b26f8 100644 --- a/tests/clients/test_google-cloud-pubsub.py +++ b/tests/clients/test_google-cloud-pubsub.py @@ -17,8 +17,8 @@ from instana.span.span import 
get_current_span from tests.test_utils import _TraceContextMixin -# Use PubSub Emulator exposed at :8085 -os.environ["PUBSUB_EMULATOR_HOST"] = "localhost:8681" +# Use PubSub Emulator exposed at :8681 +os.environ["PUBSUB_EMULATOR_HOST"] = os.getenv("PUBSUB_EMULATOR_HOST", "localhost:8681") class TestPubSubPublish(_TraceContextMixin): diff --git a/tests/clients/test_pep0249.py b/tests/clients/test_pep0249.py index ff78bf99..96c32df7 100644 --- a/tests/clients/test_pep0249.py +++ b/tests/clients/test_pep0249.py @@ -93,12 +93,12 @@ def test_cursor_wrapper_default(self) -> None: # Test Connection assert ( self.test_conn.dsn - == "user=root password=xxx dbname=instana_test_db host=127.0.0.1 port=5432" + == f'user=root password=xxx dbname=instana_test_db host={testenv["postgresql_host"]} port=5432' ) assert not self.test_conn.autocommit assert self.test_conn.status == 1 assert self.test_conn.info.dbname == "instana_test_db" - assert self.test_conn.info.host == "127.0.0.1" + assert self.test_conn.info.host == testenv["postgresql_host"] assert self.test_conn.info.user == "root" assert self.test_conn.info.port == 5432 @@ -122,7 +122,7 @@ def test_collect_kvs(self) -> None: assert span.attributes["db.name"] == "instana_test_db" assert span.attributes["db.statement"] == sample_sql assert span.attributes["db.user"] == "root" - assert span.attributes["host"] == "127.0.0.1" + assert span.attributes["host"] == testenv["postgresql_host"] assert span.attributes["port"] == 5432 def test_collect_kvs_error(self, caplog: LogCaptureFixture) -> None: @@ -260,11 +260,11 @@ def _resource(self) -> Generator[None, None, None]: self.connect_params = [ "db", { - "db": "instana_test_db", - "host": "localhost", - "port": "5432", - "user": "root", - "password": "passw0rd", + "db": testenv["postgresql_db"], + "host": testenv["postgresql_host"], + "port": testenv["postgresql_port"], + "user": testenv["postgresql_user"], + "password": testenv["postgresql_pw"], }, ] self.test_conn = psycopg2.connect( 
@@ -319,7 +319,7 @@ def _resource(self) -> Generator[None, None, None]: def test_call(self) -> None: response = self.conn_fact( - dsn="user=root password=passw0rd dbname=instana_test_db host=localhost port=5432" + dsn=f'user=root password=passw0rd dbname=instana_test_db host={testenv["postgresql_host"]} port=5432' ) assert isinstance(self.conn_fact._wrapper_ctor, ConnectionWrapper.__class__) assert self.conn_fact._connect_func == self.test_conn_func diff --git a/tests/conftest.py b/tests/conftest.py index 44088c85..2d9c7c87 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -91,10 +91,11 @@ @pytest.fixture(scope="session") def celery_config(): + redis_host = os.environ.get("REDIS_HOST", "localhost") return { "broker_connection_retry_on_startup": True, - "broker_url": "redis://localhost:6379", - "result_backend": "redis://localhost:6379", + "broker_url": f"redis://{redis_host}:6379", + "result_backend": f"redis://{redis_host}:6379", } diff --git a/tests/frameworks/test_celery.py b/tests/frameworks/test_celery.py index 90bd4318..6af4d3ce 100644 --- a/tests/frameworks/test_celery.py +++ b/tests/frameworks/test_celery.py @@ -2,6 +2,7 @@ # (c) Copyright Instana Inc. 
2020 +import os import time from typing import Generator, List @@ -52,6 +53,7 @@ def _resource(self) -> Generator[None, None, None]: self.tracer = get_tracer() self.recorder = self.tracer.span_processor self.recorder.clear_spans() + self.redis_host = os.environ.get("REDIS_HOST", "localhost") yield def test_apply_async( @@ -92,7 +94,7 @@ def filter(span): assert client_span.data["celery"]["task"] == "tests.frameworks.test_celery.add" assert client_span.data["celery"]["scheme"] == "redis" - assert client_span.data["celery"]["host"] == "localhost" + assert client_span.data["celery"]["host"] == self.redis_host assert client_span.data["celery"]["port"] == "6379" assert client_span.data["celery"]["task_id"] assert not client_span.data["celery"]["error"] @@ -100,7 +102,7 @@ def filter(span): assert worker_span.data["celery"]["task"] == "tests.frameworks.test_celery.add" assert worker_span.data["celery"]["scheme"] == "redis" - assert worker_span.data["celery"]["host"] == "localhost" + assert worker_span.data["celery"]["host"] == self.redis_host assert worker_span.data["celery"]["port"] == "6379" assert worker_span.data["celery"]["task_id"] assert not worker_span.data["celery"]["error"] @@ -145,7 +147,7 @@ def filter(span): assert client_span.data["celery"]["task"] == "tests.frameworks.test_celery.add" assert client_span.data["celery"]["scheme"] == "redis" - assert client_span.data["celery"]["host"] == "localhost" + assert client_span.data["celery"]["host"] == self.redis_host assert client_span.data["celery"]["port"] == "6379" assert client_span.data["celery"]["task_id"] assert not client_span.data["celery"]["error"] @@ -153,7 +155,7 @@ def filter(span): assert worker_span.data["celery"]["task"] == "tests.frameworks.test_celery.add" assert worker_span.data["celery"]["scheme"] == "redis" - assert worker_span.data["celery"]["host"] == "localhost" + assert worker_span.data["celery"]["host"] == self.redis_host assert worker_span.data["celery"]["port"] == "6379" assert 
worker_span.data["celery"]["task_id"] assert not worker_span.data["celery"]["error"] @@ -198,7 +200,7 @@ def filter(span): assert client_span.data["celery"]["task"] == "tests.frameworks.test_celery.add" assert client_span.data["celery"]["scheme"] == "redis" - assert client_span.data["celery"]["host"] == "localhost" + assert client_span.data["celery"]["host"] == self.redis_host assert client_span.data["celery"]["port"] == "6379" assert client_span.data["celery"]["task_id"] assert not client_span.data["celery"]["error"] @@ -206,7 +208,7 @@ def filter(span): assert worker_span.data["celery"]["task"] == "tests.frameworks.test_celery.add" assert worker_span.data["celery"]["scheme"] == "redis" - assert worker_span.data["celery"]["host"] == "localhost" + assert worker_span.data["celery"]["host"] == self.redis_host assert worker_span.data["celery"]["port"] == "6379" assert worker_span.data["celery"]["task_id"] assert not worker_span.data["celery"]["error"] @@ -264,7 +266,7 @@ def filter(span): == "tests.frameworks.test_celery.will_raise_error" ) assert client_span.data["celery"]["scheme"] == "redis" - assert client_span.data["celery"]["host"] == "localhost" + assert client_span.data["celery"]["host"] == self.redis_host assert client_span.data["celery"]["port"] == "6379" assert client_span.data["celery"]["task_id"] assert not client_span.data["celery"]["error"] @@ -275,7 +277,7 @@ def filter(span): == "tests.frameworks.test_celery.will_raise_error" ) assert worker_span.data["celery"]["scheme"] == "redis" - assert worker_span.data["celery"]["host"] == "localhost" + assert worker_span.data["celery"]["host"] == self.redis_host assert worker_span.data["celery"]["port"] == "6379" assert worker_span.data["celery"]["task_id"] assert worker_span.data["celery"]["error"] == "This is a simulated error" diff --git a/tests/helpers.py b/tests/helpers.py index 07cd94e0..7da183ea 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -25,11 +25,7 @@ """ MySQL Environment """ -if 
"MYSQL_HOST" in os.environ: - testenv["mysql_host"] = os.environ["MYSQL_HOST"] -else: - testenv["mysql_host"] = "127.0.0.1" - +testenv["mysql_host"] = os.environ.get("MYSQL_HOST", "127.0.0.1") testenv["mysql_port"] = int(os.environ.get("MYSQL_PORT", "3306")) testenv["mysql_db"] = os.environ.get("MYSQL_DATABASE", "instana_test_db") testenv["mysql_user"] = os.environ.get("MYSQL_USER", "root") @@ -62,7 +58,7 @@ RabbitMQ Environment """ testenv["rabbitmq_host"] = os.environ.get("RABBITMQ_HOST", "127.0.0.1") -testenv["rabbitmq_port"] = os.environ.get("RABBITMQ_PORT", 5672) +testenv["rabbitmq_port"] = int(os.environ.get("RABBITMQ_PORT", "5672")) """