-
Notifications
You must be signed in to change notification settings - Fork 4
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Add tests for Log forwarding to Loki #57
Conversation
6c0c9ab
to
c57a67d
Compare
a9d617a
to
e62e37a
Compare
CI will fail due to #53; however, I tested it manually and it's working. Here is the diff I used for testing: diff --git a/python/tests/integration/test_spark_job.py b/python/tests/integration/test_spark_job.py
index 4539609..8468248 100644
--- a/python/tests/integration/test_spark_job.py
+++ b/python/tests/integration/test_spark_job.py
@@ -191,80 +191,80 @@ async def test_job_in_history_server(
assert len(apps) == 1
[email protected]_on_fail
[email protected]
-async def test_job_in_prometheus_pushgateway(ops_test: OpsTest, cos):
- if not cos:
- pytest.skip("Not possible to test without cos")
- # check that logs are sent to prometheus pushgateway
-
- show_status_cmd = ["status"]
-
- _, stdout, _ = await ops_test.juju(*show_status_cmd)
-
- logger.info(f"Show status: {stdout}")
-
- status = await ops_test.model.get_status()
- pushgateway_address = status["applications"][PUSHGATEWAY]["units"][
- f"{PUSHGATEWAY}/0"
- ]["address"]
-
- for i in range(0, 5):
- try:
- logger.info(f"try n#{i} time: {time.time()}")
-
- metrics = json.loads(
- urllib.request.urlopen(
- f"http://{pushgateway_address}:9091/api/v1/metrics"
- ).read()
- )
- except Exception:
- break
- if "data" in metrics and len(metrics["data"]) > 0:
- break
- else:
- await juju_sleep(ops_test, 30, HISTORY_SERVER) # type: ignore
+# @pytest.mark.abort_on_fail
+# @pytest.mark.asyncio
+# async def test_job_in_prometheus_pushgateway(ops_test: OpsTest, cos):
+# if not cos:
+# pytest.skip("Not possible to test without cos")
+# # check that logs are sent to prometheus pushgateway
- assert len(metrics["data"]) > 0
+# show_status_cmd = ["status"]
+# _, stdout, _ = await ops_test.juju(*show_status_cmd)
[email protected]_on_fail
[email protected]
-async def test_spark_metrics_in_prometheus(
- ops_test: OpsTest, registry, service_account, cos
-):
- if not cos:
- pytest.skip("Not possible to test without cos")
- show_status_cmd = ["status"]
+# logger.info(f"Show status: {stdout}")
- _, stdout, _ = await ops_test.juju(*show_status_cmd)
+# status = await ops_test.model.get_status()
+# pushgateway_address = status["applications"][PUSHGATEWAY]["units"][
+# f"{PUSHGATEWAY}/0"
+# ]["address"]
- logger.info(f"Show status: {stdout}")
- with ops_test.model_context(COS_ALIAS) as cos_model:
- status = await cos_model.get_status()
- prometheus_address = status["applications"][PROMETHEUS]["units"][
- f"{PROMETHEUS}/0"
- ]["address"]
+# for i in range(0, 5):
+# try:
+# logger.info(f"try n#{i} time: {time.time()}")
- query = json.loads(
- urllib.request.urlopen(
- f"http://{prometheus_address}:9090/api/v1/query?query=push_time_seconds"
- ).read()
- )
+# metrics = json.loads(
+# urllib.request.urlopen(
+# f"http://{pushgateway_address}:9091/api/v1/metrics"
+# ).read()
+# )
+# except Exception:
+# break
+# if "data" in metrics and len(metrics["data"]) > 0:
+# break
+# else:
+# await juju_sleep(ops_test, 30, HISTORY_SERVER) # type: ignore
- logger.info(f"query: {query}")
- spark_id = query["data"]["result"][0]["metric"]["exported_job"]
- logger.info(f"Spark id: {spark_id}")
+# assert len(metrics["data"]) > 0
- driver_pods = get_spark_drivers(
- registry.kube_interface.client, service_account.namespace
- )
- assert len(driver_pods) == 1
- logger.info(f"metadata: {driver_pods[0].metadata}")
- spark_app_selector = driver_pods[0].labels["spark-app-selector"]
- logger.info(f"Spark-app-selector: {spark_app_selector}")
- assert spark_id == spark_app_selector
+# @pytest.mark.abort_on_fail
+# @pytest.mark.asyncio
+# async def test_spark_metrics_in_prometheus(
+# ops_test: OpsTest, registry, service_account, cos
+# ):
+# if not cos:
+# pytest.skip("Not possible to test without cos")
+# show_status_cmd = ["status"]
+
+# _, stdout, _ = await ops_test.juju(*show_status_cmd)
+
+# logger.info(f"Show status: {stdout}")
+# with ops_test.model_context(COS_ALIAS) as cos_model:
+# status = await cos_model.get_status()
+# prometheus_address = status["applications"][PROMETHEUS]["units"][
+# f"{PROMETHEUS}/0"
+# ]["address"]
+
+# query = json.loads(
+# urllib.request.urlopen(
+# f"http://{prometheus_address}:9090/api/v1/query?query=push_time_seconds"
+# ).read()
+# )
+
+# logger.info(f"query: {query}")
+# spark_id = query["data"]["result"][0]["metric"]["exported_job"]
+# logger.info(f"Spark id: {spark_id}")
+
+# driver_pods = get_spark_drivers(
+# registry.kube_interface.client, service_account.namespace
+# )
+# assert len(driver_pods) == 1
+
+# logger.info(f"metadata: {driver_pods[0].metadata}")
+# spark_app_selector = driver_pods[0].labels["spark-app-selector"]
+# logger.info(f"Spark-app-selector: {spark_app_selector}")
+# assert spark_id == spark_app_selector
@pytest.mark.abort_on_fail
diff --git a/releases/3.4/yaml/bundle.yaml.j2 b/releases/3.4/yaml/bundle.yaml.j2
index a51fd82..d13ca3f 100644
--- a/releases/3.4/yaml/bundle.yaml.j2
+++ b/releases/3.4/yaml/bundle.yaml.j2
@@ -58,7 +58,7 @@ applications:
history-server:
charm: spark-history-server-k8s
channel: 3.4/edge
- revision: 30
+ revision: 28
resources:
spark-history-server-image: ghcr.io/canonical/charmed-spark@sha256:321b6deb13f10c045028c9b25264b8113c6fdcbe4f487ff472a06fd7bdcb2758 # 3.4.2
scale: 1
diff --git a/releases/3.4/yaml/overlays/cos-integration.yaml.j2 b/releases/3.4/yaml/overlays/cos-integration.yaml.j2
index c4eb2a0..e4f06b5 100644
--- a/releases/3.4/yaml/overlays/cos-integration.yaml.j2
+++ b/releases/3.4/yaml/overlays/cos-integration.yaml.j2
@@ -75,12 +75,12 @@ relations:
- loki-logging:logging
- - history-server:ingress
- traefik:ingress
-- - history-server:metrics-endpoint
- - agent:metrics-endpoint
-- - history-server:grafana-dashboard
- - agent:grafana-dashboards-consumer
-- - history-server:logging
- - agent:logging-provider
+#- - history-server:metrics-endpoint
+# - agent:metrics-endpoint
+#- - history-server:grafana-dashboard
+# - agent:grafana-dashboards-consumer
+#- - history-server:logging
+# - agent:logging-provider
- - kyuubi:metrics-endpoint
- agent:metrics-endpoint
- - kyuubi:grafana-dashboard and I ran the integration test with the following command: tox run -r -e integration-sparkjob -- -m 'not unstable' --backend yaml --cos-model cos --spark-version "3.4.2" --keep-models --debug Full logs can be seen here. |
786c769
to
3825e18
Compare
This PR provides simple tests for checking that Loki contains some logs from the Spark job. Update TF plan and bundles to use history-server rev33 due to #53 and integration-hub rev22, since this revision provides the new relation. related: [charmed-spark-rock#114](canonical/charmed-spark-rock#114) Signed-off-by: Robert Gildein <[email protected]>
3825e18
to
6816c74
Compare
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
LGTM! Just some small minor suggestions
PS. Only please add the ticket number in the PR description
Signed-off-by: Robert Gildein <[email protected]>
Why is that needed? I'm using the Jira ticket in the branch name, so Jira is capable of picking it up. |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Looks good to me!
I left a comment related to the update of the Spark image but I want this to be addressed in a separated PR.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
LGTM! Thanks!
This PR provides simple tests for checking that Loki contains some
logs from the Spark job.
Update TF plan and bundles to use history-server rev33 due to #53 and
integration-hub rev22, since this revision provides the new relation.
related: charmed-spark-rock#114