diff --git a/app/backend/app.py b/app/backend/app.py index aabc506129..0f8e4843f8 100644 --- a/app/backend/app.py +++ b/app/backend/app.py @@ -561,8 +561,7 @@ def create_app(): app = Quart(__name__) app.register_blueprint(bp) - if os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING"): - configure_azure_monitor() + def instrument_app(): # This tracks HTTP requests made by aiohttp: AioHttpClientInstrumentor().instrument() # This tracks HTTP requests made by httpx: @@ -572,6 +571,20 @@ def create_app(): # This middleware tracks app route requests: app.asgi_app = OpenTelemetryMiddleware(app.asgi_app) # type: ignore[assignment] + if os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING"): + configure_azure_monitor() + instrument_app() + elif os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT"): + from otlp_tracing import configure_oltp_grpc_tracing + + configure_oltp_grpc_tracing( + service_name=os.getenv("OTEL_SERVICE_NAME", "azure-search-openai-demo"), + endpoint=os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT"), + insecure=os.getenv("OTEL_EXPORTER_OTLP_TRACES_INSECURE", "true").lower() == "true", + api_key=os.getenv("OTEL_EXPORTER_OTLP_TRACES_API_KEY"), + ) + instrument_app() + # Level should be one of https://docs.python.org/3/library/logging.html#logging-levels default_level = "INFO" # In development, log more verbosely if os.getenv("WEBSITE_HOSTNAME"): # In production, don't log as heavily diff --git a/app/backend/otlp_tracing.py b/app/backend/otlp_tracing.py new file mode 100644 index 0000000000..b481338b22 --- /dev/null +++ b/app/backend/otlp_tracing.py @@ -0,0 +1,52 @@ +import logging + +from opentelemetry import metrics, trace + +# Logging (Experimental) +from opentelemetry._logs import set_logger_provider +from opentelemetry.exporter.otlp.proto.grpc._log_exporter import ( + OTLPLogExporter, +) +from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from 
opentelemetry.sdk._logs import LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader +from opentelemetry.sdk.resources import SERVICE_NAME, Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + + +def configure_oltp_grpc_tracing( + service_name: str = "azure-search-openai-demo", endpoint=None, insecure=True, api_key=None +): + # Service name is required for most backends + resource = Resource(attributes={SERVICE_NAME: service_name}) + + if api_key: + headers = {"x-otlp-api-key": api_key} + else: + headers = None + + # Configure Tracing + traceProvider = TracerProvider(resource=resource) + processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=endpoint, insecure=insecure, headers=headers)) + traceProvider.add_span_processor(processor) + trace.set_tracer_provider(traceProvider) + + # Configure Metrics + reader = PeriodicExportingMetricReader(OTLPMetricExporter(endpoint=endpoint, insecure=insecure, headers=headers)) + meterProvider = MeterProvider(resource=resource, metric_readers=[reader]) + metrics.set_meter_provider(meterProvider) + + # Configure Logging + logger_provider = LoggerProvider(resource=resource) + set_logger_provider(logger_provider) + + exporter = OTLPLogExporter(endpoint=endpoint, insecure=insecure, headers=headers) + logger_provider.add_log_record_processor(BatchLogRecordProcessor(exporter)) + handler = LoggingHandler(level=logging.NOTSET, logger_provider=logger_provider) + + # Attach OTLP handler to root logger + logging.getLogger().addHandler(handler) diff --git a/app/backend/requirements.in b/app/backend/requirements.in index b0147e2f02..1008840ff8 100644 --- a/app/backend/requirements.in +++ b/app/backend/requirements.in @@ -16,6 +16,9 @@ opentelemetry-instrumentation-httpx 
opentelemetry-instrumentation-requests opentelemetry-instrumentation-aiohttp-client opentelemetry-instrumentation-openai +opentelemetry-exporter-otlp-proto-grpc +opentelemetry-exporter-otlp-proto-http + msal azure-keyvault-secrets cryptography diff --git a/app/backend/requirements.txt b/app/backend/requirements.txt index ffacba9668..365f38ca1d 100644 --- a/app/backend/requirements.txt +++ b/app/backend/requirements.txt @@ -18,7 +18,7 @@ anyio==4.3.0 # via # httpx # openai -asgiref==3.7.2 +asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==23.2.0 # via aiohttp @@ -89,7 +89,10 @@ cryptography==42.0.5 # pyjwt # python-jose deprecated==1.2.14 - # via opentelemetry-api + # via + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http distro==1.9.0 # via openai ecdsa==0.18.0 @@ -102,6 +105,12 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal +googleapis-common-protos==1.63.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +grpcio==1.62.1 + # via opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via # httpcore @@ -114,7 +123,7 @@ h2==4.1.0 # hypercorn hpack==4.0.0 # via h2 -httpcore==1.0.4 +httpcore==1.0.5 # via httpx httpx[http2]==0.27.0 # via @@ -131,8 +140,10 @@ idna==3.7 # httpx # requests # yarl -importlib-metadata==6.11.0 - # via opentelemetry-api +importlib-metadata==7.0.0 + # via + # opentelemetry-api + # opentelemetry-instrumentation-flask isodate==0.6.1 # via # azure-ai-documentintelligence @@ -174,7 +185,7 @@ microsoft-kiota-serialization-json==1.1.0 # via msgraph-sdk microsoft-kiota-serialization-text==1.0.0 # via msgraph-sdk -msal==1.27.0 +msal==1.28.0 # via # -r requirements.in # azure-identity @@ -198,15 +209,12 @@ numpy==1.26.4 # pandas-stubs oauthlib==3.2.2 # via requests-oauthlib -openai[datalib]==1.13.3 +openai[datalib]==1.16.1 # via -r requirements.in -opentelemetry-api==1.23.0 +opentelemetry-api==1.24.0 # via # azure-core-tracing-opentelemetry 
# azure-monitor-opentelemetry-exporter - # microsoft-kiota-abstractions - # microsoft-kiota-authentication-azure - # microsoft-kiota-http # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-asgi @@ -222,7 +230,15 @@ opentelemetry-api==1.23.0 # opentelemetry-instrumentation-urllib3 # opentelemetry-instrumentation-wsgi # opentelemetry-sdk -opentelemetry-instrumentation==0.44b0 +opentelemetry-exporter-otlp-proto-common==1.24.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc==1.24.0 + # via -r requirements.in +opentelemetry-exporter-otlp-proto-http==1.24.0 + # via -r requirements.in +opentelemetry-instrumentation==0.45b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-asgi @@ -237,48 +253,50 @@ opentelemetry-instrumentation==0.44b0 # opentelemetry-instrumentation-urllib # opentelemetry-instrumentation-urllib3 # opentelemetry-instrumentation-wsgi -opentelemetry-instrumentation-aiohttp-client==0.44b0 +opentelemetry-instrumentation-aiohttp-client==0.45b0 # via -r requirements.in -opentelemetry-instrumentation-asgi==0.44b0 +opentelemetry-instrumentation-asgi==0.45b0 # via # -r requirements.in # opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-dbapi==0.44b0 +opentelemetry-instrumentation-dbapi==0.45b0 # via opentelemetry-instrumentation-psycopg2 -opentelemetry-instrumentation-django==0.44b0 +opentelemetry-instrumentation-django==0.45b0 # via azure-monitor-opentelemetry -opentelemetry-instrumentation-fastapi==0.44b0 +opentelemetry-instrumentation-fastapi==0.45b0 # via azure-monitor-opentelemetry -opentelemetry-instrumentation-flask==0.44b0 +opentelemetry-instrumentation-flask==0.45b0 # via azure-monitor-opentelemetry -opentelemetry-instrumentation-httpx==0.44b0 +opentelemetry-instrumentation-httpx==0.45b0 # via -r requirements.in -opentelemetry-instrumentation-openai==0.13.1 
+opentelemetry-instrumentation-openai==0.15.9 # via -r requirements.in -opentelemetry-instrumentation-psycopg2==0.44b0 +opentelemetry-instrumentation-psycopg2==0.45b0 # via azure-monitor-opentelemetry -opentelemetry-instrumentation-requests==0.44b0 +opentelemetry-instrumentation-requests==0.45b0 # via # -r requirements.in # azure-monitor-opentelemetry -opentelemetry-instrumentation-urllib==0.44b0 +opentelemetry-instrumentation-urllib==0.45b0 # via azure-monitor-opentelemetry -opentelemetry-instrumentation-urllib3==0.44b0 +opentelemetry-instrumentation-urllib3==0.45b0 # via azure-monitor-opentelemetry -opentelemetry-instrumentation-wsgi==0.44b0 +opentelemetry-instrumentation-wsgi==0.45b0 # via # opentelemetry-instrumentation-django # opentelemetry-instrumentation-flask +opentelemetry-proto==1.24.0 + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http opentelemetry-resource-detector-azure==0.1.3 # via azure-monitor-opentelemetry -opentelemetry-sdk==1.23.0 +opentelemetry-sdk==1.24.0 # via # azure-monitor-opentelemetry-exporter - # microsoft-kiota-abstractions - # microsoft-kiota-authentication-azure - # microsoft-kiota-http # opentelemetry-resource-detector-azure -opentelemetry-semantic-conventions==0.44b0 +opentelemetry-semantic-conventions==0.45b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-asgi @@ -287,14 +305,15 @@ opentelemetry-semantic-conventions==0.44b0 # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-flask # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-openai # opentelemetry-instrumentation-requests # opentelemetry-instrumentation-urllib # opentelemetry-instrumentation-urllib3 # opentelemetry-instrumentation-wsgi # opentelemetry-sdk -opentelemetry-semantic-conventions-ai==0.0.20 +opentelemetry-semantic-conventions-ai==0.1.1 # via opentelemetry-instrumentation-openai 
-opentelemetry-util-http==0.44b0 +opentelemetry-util-http==0.45b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-asgi @@ -306,13 +325,13 @@ opentelemetry-util-http==0.44b0 # opentelemetry-instrumentation-urllib # opentelemetry-instrumentation-urllib3 # opentelemetry-instrumentation-wsgi -packaging==23.2 +packaging==24.0 # via # msal-extensions # opentelemetry-instrumentation-flask pandas==2.2.1 # via openai -pandas-stubs==2.2.0.240218 +pandas-stubs==2.2.1.240316 # via openai pendulum==3.0.0 # via microsoft-kiota-serialization-json @@ -322,13 +341,17 @@ portalocker==2.8.2 # via msal-extensions priority==2.0.0 # via hypercorn -pyasn1==0.5.1 +protobuf==4.25.3 + # via + # googleapis-common-protos + # opentelemetry-proto +pyasn1==0.6.0 # via # python-jose # rsa -pycparser==2.21 +pycparser==2.22 # via cffi -pydantic==2.6.3 +pydantic==2.6.4 # via openai pydantic-core==2.16.3 # via pydantic @@ -352,7 +375,7 @@ python-jose[cryptography]==3.3.0 # via -r requirements.in pytz==2024.1 # via pandas -quart==0.19.4 +quart==0.19.5 # via # -r requirements.in # quart-cors @@ -365,9 +388,10 @@ requests==2.31.0 # azure-core # msal # msrest + # opentelemetry-exporter-otlp-proto-http # requests-oauthlib # tiktoken -requests-oauthlib==1.3.1 +requests-oauthlib==2.0.0 # via msrest rsa==4.9 # via python-jose @@ -398,7 +422,7 @@ types-beautifulsoup4==4.12.0.20240229 # via -r requirements.in types-html5lib==1.1.11.20240228 # via types-beautifulsoup4 -types-pillow==10.2.0.20240213 +types-pillow==10.2.0.20240331 # via -r requirements.in types-pyasn1==0.6.0.20240402 # via types-python-jose @@ -423,9 +447,9 @@ tzdata==2024.1 # pendulum urllib3==2.2.1 # via requests -uvicorn==0.27.1 +uvicorn==0.29.0 # via -r requirements.in -werkzeug==3.0.1 +werkzeug==3.0.2 # via # flask # quart @@ -440,7 +464,7 @@ wsproto==1.2.0 # via hypercorn yarl==1.9.4 # via aiohttp -zipp==3.17.0 +zipp==3.18.1 # via importlib-metadata # The following packages are considered to be unsafe in 
a requirements file: diff --git a/docs/localdev.md b/docs/localdev.md index 252cca0a8c..5657e50df3 100644 --- a/docs/localdev.md +++ b/docs/localdev.md @@ -65,3 +65,7 @@ If you're running inside a dev container, use this local URL instead: ```shell azd env set OPENAI_BASE_URL http://host.docker.internal:8080/v1 ``` + +## (Optional) Running with OpenTelemetry tracing + +To run the service with OpenTelemetry tracing, you can use the local Aspire Dashboard, see [OpenTelemetry support](opentelemetry.md) for details on starting the dashboard. \ No newline at end of file diff --git a/docs/opentelemetry.md b/docs/opentelemetry.md new file mode 100644 index 0000000000..d80ce5e289 --- /dev/null +++ b/docs/opentelemetry.md @@ -0,0 +1,34 @@ +# OpenTelemetry support + +This project has instrumentation for OpenTelemetry. The OpenTelemetry project provides a single set of APIs, libraries, agents, and instrumentation resources to capture distributed traces and metrics from your application. There are two supported options for viewing traces emitted by this application: + +1. Locally using [the .NET Aspire Dashboard](#starting-the-aspire-dashboard) +2. Remotely using [Azure Monitor and Application Insights](../README.md#monitoring-with-application-insights) + +## Starting the .NET Aspire Dashboard + +The .NET Aspire dashboard is an OpenTelemetry dashboard service that can be run locally using Docker. The dashboard is a web application that can be accessed at `http://localhost:18888`. The .NET Aspire Dashboard is designed for local development and testing. Once the container is stopped, any traces, logs and metrics will be destroyed. For persistent logging, see the [Azure Monitor and Application Insights](../README.md#monitoring-with-application-insights) integration. + +You can set a temporary key for OTLP API as an environment variable when starting the container. The key is used to verify that incoming data is from a trusted source. 
The key is used to authenticate the data source and is not used to authenticate users accessing the dashboard. + +```console +export OTLP_KEY=f142d227-486e-4e80-b7bd-3446e6aa8ea1 # Your own unique key +docker run --rm -it -p 18888:18888 -p 4317:18889 --name aspire-dashboard \ + -e DASHBOARD__OTLP__AUTHMODE='ApiKey' \ + -e DASHBOARD__OTLP__PRIMARYAPIKEY="${OTLP_KEY}" \ + mcr.microsoft.com/dotnet/nightly/aspire-dashboard:8.0-preview +``` + +Once you have started the container, look at the output for a link to the dashboard: the service generates a unique sign-in key that you can use to access the dashboard and prints it to stdout. + +## Starting the service with OpenTelemetry + +To send data to the Aspire dashboard, you need to configure your application to send data to the OpenTelemetry collector. The collector is a service that receives telemetry data from your application and forwards it to the dashboard. + +From the `app/` directory, you can start the service with the following command and additional environment variables: + +```console +$ OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 OTEL_EXPORTER_OTLP_TRACES_API_KEY=${OTLP_KEY} ./start.sh +``` + +This will launch the web services and connect all tracing to the OTLP endpoint running in the dashboard container.