Skip to content

Commit 9fd6deb

Browse files
authored
Merge branch 'main' into ishabi/endpoint-resource-renaming
2 parents fdbb31d + 880d770 commit 9fd6deb

File tree

20 files changed

+177
-123
lines changed

20 files changed

+177
-123
lines changed

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,6 @@ jobs:
6666
strategy:
6767
matrix:
6868
include: ${{ fromJson(needs.compute_libraries_and_scenarios.outputs.library_matrix) }}
69-
7069
fail-fast: false
7170
uses: ./.github/workflows/system-tests.yml
7271
permissions:
@@ -87,6 +86,7 @@ jobs:
8786
library: ${{ matrix.library }}
8887
scenarios: ${{ needs.compute_libraries_and_scenarios.outputs.scenarios }}
8988
scenarios_groups: ${{ needs.compute_libraries_and_scenarios.outputs.scenarios_groups }}
89+
excluded_scenarios: OTEL_COLLECTOR_E2E # rely on real backend
9090
parametric_job_count: ${{ matrix.version == 'dev' && 2 || 1 }} # test both use cases
9191
skip_empty_scenarios: true
9292
display_summary: true

.github/workflows/lint.yml

Lines changed: 5 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,11 @@ jobs:
1313
uses: ./.github/actions/install_runner
1414
- run: source venv/bin/activate
1515
shell: bash
16-
- name: Black, pylint, tailing whitespaces, and yaml checks
16+
- name: Install node
17+
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
18+
with:
19+
node-version: 20
20+
- name: Black, pylint, tailing whitespaces, yaml, shellcheck, and language-specific checks
1721
shell: bash
1822
run: ./format.sh --check
1923
- if: ${{ failure() }}
@@ -24,25 +28,3 @@ jobs:
2428
exit 1
2529
2630
'
27-
- name: 'Install shellcheck'
28-
shell: bash
29-
run: sudo apt-get install -y shellcheck
30-
- name: 'Run shellcheck'
31-
shell: bash
32-
run: ./utils/scripts/shellcheck.sh
33-
- name: Install node
34-
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
35-
with:
36-
node-version: 20
37-
- name: 'Run nodejs express lint'
38-
shell: bash
39-
working-directory: ./utils/build/docker/nodejs/express
40-
run: |
41-
npm install
42-
npm run lint
43-
- name: 'Run nodejs fastify lint'
44-
shell: bash
45-
working-directory: ./utils/build/docker/nodejs/fastify
46-
run: |
47-
npm install
48-
npm run lint

.github/workflows/run-end-to-end.yml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -472,6 +472,11 @@ jobs:
472472
- name: Run OTEL_COLLECTOR scenario
473473
if: always() && steps.build.outcome == 'success' && contains(inputs.scenarios, '"OTEL_COLLECTOR"')
474474
run: ./run.sh OTEL_COLLECTOR
475+
- name: Run OTEL_COLLECTOR_E2E scenario
476+
if: always() && steps.build.outcome == 'success' && contains(inputs.scenarios, '"OTEL_COLLECTOR_E2E"')
477+
run: ./run.sh OTEL_COLLECTOR_E2E
478+
env:
479+
DD_API_KEY: ${{ secrets.DD_API_KEY }}
475480

476481
- name: Run all scenarios in replay mode
477482
if: success() && steps.build.outcome == 'success' && inputs._enable_replay_scenarios

.github/workflows/system-tests.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ jobs:
187187
name: Build end-to-end (${{ matrix.weblog.name }})
188188
needs:
189189
- compute_parameters
190-
if: ${{ needs.compute_parameters.outputs.endtoend_defs_parallel_enable == 'true' }}
190+
if: ${{ needs.compute_parameters.outputs.endtoend_defs_parallel_enable == 'true' && needs.compute_parameters.outputs.endtoend_defs_parallel_weblogs != '[]'}}
191191
runs-on: ubuntu-latest
192192
strategy:
193193
matrix:
@@ -249,7 +249,7 @@ jobs:
249249
needs:
250250
- compute_parameters
251251
- build_end_to_end
252-
if: ${{ needs.compute_parameters.outputs.endtoend_defs_parallel_enable == 'true' }}
252+
if: ${{ always() && needs.build_end_to_end.result != 'failure' && needs.compute_parameters.outputs.endtoend_defs_parallel_enable == 'true' }}
253253
strategy:
254254
matrix:
255255
job: ${{ fromJson(needs.compute_parameters.outputs.endtoend_defs_parallel_jobs) }}

.shellcheck

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,4 +33,5 @@ TODO=(
3333
utils/interfaces/schemas/serve.sh
3434
build.sh
3535
format.sh
36+
*node_modules*
3637
)

format.sh

Lines changed: 24 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -44,12 +44,6 @@ if ! mypy --config pyproject.toml; then
4444
exit 1
4545
fi
4646

47-
echo "Running ruff checks..."
48-
if ! which ruff > /dev/null; then
49-
echo "ruff is not installed, installing it (ETA 5s)"
50-
./build.sh -i runner > /dev/null
51-
fi
52-
5347
echo "Running ruff formatter..."
5448
if [ "$COMMAND" == "fix" ]; then
5549
ruff format
@@ -133,11 +127,6 @@ else
133127
fi
134128
135129
echo "Running yamllint checks..."
136-
if ! which ./venv/bin/yamllint > /dev/null; then
137-
echo "yamllint is not installed, installing it (ETA 60s)"
138-
./build.sh -i runner > /dev/null
139-
fi
140-
141130
if ! ./venv/bin/yamllint -s manifests/; then
142131
echo "yamllint checks failed. Please fix the errors above. 💥 💔 💥"
143132
exit 1
@@ -149,5 +138,29 @@ if ! python ./manifests/parser/core.py; then
149138
exit 1
150139
fi
151140
141+
echo "Running shellcheck checks..."
142+
if ! ./utils/scripts/shellcheck.sh; then
143+
echo "shellcheck checks failed. Please fix the errors above. 💥 💔 💥"
144+
exit 1
145+
fi
146+
147+
echo "Running language-specific linters..."
148+
# This will not run if npm is not installed as written and there is no "install" step today
149+
# TODO: Install node as part of this script
150+
if which npm > /dev/null; then
151+
echo "Running Node.js linters"
152+
153+
# currently only fastify requires linting
154+
# this can be added later
155+
nodejs_dirs=("express" "fastify")
156+
157+
for dir in "${nodejs_dirs[@]}"; do
158+
if ! NODE_NO_WARNINGS=1 npm --prefix ./utils/build/docker/nodejs/"$dir" install --silent && npm --prefix ./utils/build/docker/nodejs/"$dir" run --silent lint; then
159+
echo "$dir linter failed. Please fix the errors above. 💥 💔 💥"
160+
exit 1
161+
fi
162+
done
163+
fi
164+
152165
153166
echo "All good, the system-tests CI will be happy! ✨ 🍰 ✨"

manifests/java.yml

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -72,19 +72,24 @@ tests/:
7272
vertx4: v1.51.0
7373
test_custom_data_classification.py:
7474
Test_API_Security_Custom_Data_Classification_Capabilities:
75-
'*': missing_feature
75+
'*': v1.55.0
76+
akka-http: bug (APPSEC-56888)
7677
spring-boot-3-native: irrelevant (GraalVM. Tracing support only)
7778
Test_API_Security_Custom_Data_Classification_Multiple_Scanners:
78-
'*': missing_feature
79+
'*': v1.55.0
80+
akka-http: bug (APPSEC-56888)
7981
spring-boot-3-native: irrelevant (GraalVM. Tracing support only)
8082
Test_API_Security_Custom_Data_Classification_Negative:
81-
'*': missing_feature
83+
'*': v1.55.0
84+
akka-http: bug (APPSEC-56888)
8285
spring-boot-3-native: irrelevant (GraalVM. Tracing support only)
8386
Test_API_Security_Custom_Data_Classification_Processor_Override:
84-
'*': missing_feature
87+
'*': v1.55.0
88+
akka-http: bug (APPSEC-56888)
8589
spring-boot-3-native: irrelevant (GraalVM. Tracing support only)
8690
Test_API_Security_Custom_Data_Classification_Scanner:
87-
'*': missing_feature
91+
'*': v1.55.0
92+
akka-http: bug (APPSEC-56888)
8893
spring-boot-3-native: irrelevant (GraalVM. Tracing support only)
8994
test_endpoint_discovery.py:
9095
Test_Endpoint_Discovery:

requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ ruff==0.8.1
3636
scp==0.14.5
3737
semantic-version==2.10.0
3838
setuptools==75.8.0
39+
shellcheck-py==0.11.0.1
3940
types-aiofiles==24.1.0.20241221
4041
types-protobuf==5.29.1.20241207
4142
types-python-dateutil==2.9.0.20241206

tests/otel_postgres_metrics_e2e/test_postgres_metrics.py

Lines changed: 36 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -22,46 +22,46 @@
2222
"postgresql.rollbacks": {"data_type": "Sum", "description": "The number of rollbacks"},
2323
"postgresql.db_size": {"data_type": "Sum", "description": "The database disk usage"},
2424
"postgresql.table.count": {"data_type": "Sum", "description": "Number of user tables in a database"},
25-
"postgresql.backends": {"data_type": "Sum", "description": "The number of backends"},
26-
"postgresql.bgwriter.buffers.allocated": {"data_type": "Sum", "description": "Number of buffers allocated"},
27-
"postgresql.bgwriter.buffers.writes": {"data_type": "Sum", "description": "Number of buffers written"},
28-
"postgresql.bgwriter.checkpoint.count": {"data_type": "Sum", "description": "The number of checkpoints performed"},
29-
"postgresql.bgwriter.duration": {
30-
"data_type": "Sum",
31-
"description": "Total time spent writing and syncing files to disk by checkpoints",
32-
},
33-
"postgresql.bgwriter.maxwritten": {
34-
"data_type": "Sum",
35-
"description": "Number of times the background writer stopped a cleaning scan because it had written too many buffers",
36-
},
25+
# "postgresql.backends": {"data_type": "Sum", "description": "The number of backends"},
26+
# "postgresql.bgwriter.buffers.allocated": {"data_type": "Sum", "description": "Number of buffers allocated"},
27+
# "postgresql.bgwriter.buffers.writes": {"data_type": "Sum", "description": "Number of buffers written"},
28+
# "postgresql.bgwriter.checkpoint.count": {"data_type": "Sum", "description": "The number of checkpoints performed"},
29+
# "postgresql.bgwriter.duration": {
30+
# "data_type": "Sum",
31+
# "description": "Total time spent writing and syncing files to disk by checkpoints",
32+
# },
33+
# "postgresql.bgwriter.maxwritten": {
34+
# "data_type": "Sum",
35+
# "description": "Number of times the background writer stopped a cleaning scan because it had written too many buffers",
36+
# },
3737
# Optional metrics (enabled in otelcol-config-with-postgres.yaml)
38-
"postgresql.blks_hit": {
39-
"data_type": "Sum",
40-
"description": "Number of times disk blocks were found already in the buffer cache",
41-
},
42-
"postgresql.blks_read": {"data_type": "Sum", "description": "Number of disk blocks read in this database"},
38+
# "postgresql.blks_hit": {
39+
# "data_type": "Sum",
40+
# "description": "Number of times disk blocks were found already in the buffer cache",
41+
# },
42+
# "postgresql.blks_read": {"data_type": "Sum", "description": "Number of disk blocks read in this database"},
4343
"postgresql.database.locks": {"data_type": "Gauge", "description": "The number of database locks"},
4444
"postgresql.deadlocks": {"data_type": "Sum", "description": "The number of deadlocks"},
45-
"postgresql.temp.io": {
46-
"data_type": "Sum",
47-
"description": "Total amount of data written to temporary files by queries",
48-
},
45+
# "postgresql.temp.io": {
46+
# "data_type": "Sum",
47+
# "description": "Total amount of data written to temporary files by queries",
48+
# },
4949
"postgresql.temp_files": {"data_type": "Sum", "description": "The number of temp files"},
50-
"postgresql.tup_deleted": {"data_type": "Sum", "description": "Number of rows deleted by queries in the database"},
51-
"postgresql.tup_fetched": {"data_type": "Sum", "description": "Number of rows fetched by queries in the database"},
52-
"postgresql.tup_inserted": {
53-
"data_type": "Sum",
54-
"description": "Number of rows inserted by queries in the database",
55-
},
56-
"postgresql.tup_returned": {
57-
"data_type": "Sum",
58-
"description": "Number of rows returned by queries in the database",
59-
},
60-
"postgresql.tup_updated": {"data_type": "Sum", "description": "Number of rows updated by queries in the database"},
61-
"postgresql.function.calls": {
62-
"data_type": "Sum",
63-
"description": "The number of calls made to a function. Requires `track_functions=pl|all` in Postgres config.",
64-
},
50+
# "postgresql.tup_deleted": {"data_type": "Sum", "description": "Number of rows deleted by queries in the database"},
51+
# "postgresql.tup_fetched": {"data_type": "Sum", "description": "Number of rows fetched by queries in the database"},
52+
# "postgresql.tup_inserted": {
53+
# "data_type": "Sum",
54+
# "description": "Number of rows inserted by queries in the database",
55+
# },
56+
# "postgresql.tup_returned": {
57+
# "data_type": "Sum",
58+
# "description": "Number of rows returned by queries in the database",
59+
# },
60+
# "postgresql.tup_updated": {"data_type": "Sum", "description": "Number of rows updated by queries in the database"},
61+
# "postgresql.function.calls": {
62+
# "data_type": "Sum",
63+
# "description": "The number of calls made to a function. Requires `track_functions=pl|all` in Postgres config.",
64+
# },
6565
"postgresql.sequential_scans": {"data_type": "Sum", "description": "The number of sequential scans"},
6666
"postgresql.table.size": {"data_type": "Sum", "description": "Disk space used by a table."},
6767
"postgresql.rows": {"data_type": "Sum", "description": "The number of rows in the database"},

tests/parametric/test_headers_b3multi.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ def enable_case_insensitive_b3multi() -> pytest.MarkDecorator:
3636
return parametrize("library_env", [env1, env2])
3737

3838

39-
@features.b3_headers_propagation
39+
@features.b3multi_headers_propagation
4040
@scenarios.parametric
4141
class Test_Headers_B3multi:
4242
@enable_b3multi()

0 commit comments

Comments
 (0)