3 changes: 3 additions & 0 deletions .github/workflows/docker.yml
@@ -231,6 +231,9 @@ jobs:
needs: [build-docker]
steps:
- name: Branch deployment docs
# if this build is for a forked PR or dependabot the images are not pushed to the
# registry and the deploy instructions won't work.
if: ${{ env.use_registry == 'true' }}
run: |
cat >> $GITHUB_STEP_SUMMARY <<EOF

5 changes: 2 additions & 3 deletions Makefile
@@ -80,8 +80,7 @@ update_cert_fingerprints:
cd $(CERTSSDIR); ./update-certs.sh

update_container_documentation:
chmod +x bin/update_container_documentation.sh
./bin/update_container_documentation.sh
${DOCKER_COMPOSE_TOOLS_CMD} run --rm tools bin/update_container_documentation.sh

update_padded_macs:
chmod +x $(MACSDIR)/update-macs.sh
@@ -573,7 +572,7 @@ else
docker_host = host-gateway
endif

DOCKER_COMPOSE_DEVELOP_CMD=COMPOSE_FILE=docker/compose.test-runner-develop.yaml:docker/compose.yaml:docker/compose.development.yaml RELEASE=latest docker compose --env-file=docker/defaults.env --env-file=docker/develop.env
DOCKER_COMPOSE_DEVELOP_CMD=docker compose --env-file=docker/defaults.env --env-file=docker/develop.env

# this runs limited live test suite against the development environment to test its sanity
develop-tests development-environment-tests:
9 changes: 9 additions & 0 deletions bin/check.sh
@@ -12,6 +12,15 @@ if [ ! -z "$(git status --porcelain $requirements_files)" ];then
fail=1
fi

documentation_file="documentation/Docker-container-profiles.md"
bin/update_container_documentation.sh
if [ ! -z "$(git diff --word-diff=porcelain -G'^\| ' $documentation_file)" ];then
echo -e "\e[31mThe docker container documentation is not up to date, please run 'make update_container_documentation' and commit the documentation to Git!"
git diff --word-diff=porcelain -G'^\| ' $documentation_file
fail=1
fi


# verify DEBUG can only be enabled when authentication is set (should exit 1 and print error message)
command="docker run -ti --rm -e DEBUG=True -e AUTH_ALL_URLS -e ALLOW_LIST ghcr.io/internetstandards/webserver"
output=$($command)
8 changes: 4 additions & 4 deletions bin/update_container_documentation.sh
@@ -1,10 +1,10 @@
#!/bin/env sh
#!/usr/bin/env sh

scriptdir=$(dirname "$(readlink -f -- "$0")")
( \
echo "# Docker container profiles overview\n\nThis overview was last generated at $(date -uIseconds|sed 's/+00:00/Z/g') with \`make update_container_documentation\`.\n\n"
docker run --rm --security-opt=no-new-privileges --cap-drop all --network none -v "$PWD/docker/compose.yaml":"/docker/compose.yaml" \
mikefarah/yq:4.45.1 -r '"container|profiles|description","-|-|-",.services|to_entries|map([.key,(.value.profiles //[]|join(", "),(.key|head_comment|split("\n")|join("<br>")))]|join("|"))[]' /docker/compose.yaml \
yq -r '"container|profiles|description","-|-|-",.services|to_entries|map([.key,(.value.profiles //[]|join(", "),(.key|head_comment|split("\n")|join("<br>")))]|join("|"))[]' $scriptdir/../docker/compose.yaml \
| sed 's/$/|/' \
| column -ts"|" -o" | " \
| sed 's/^/| /;s/ $//;2{s/ /-/g}' \
) > documentation/Docker-container-profiles.md
) > $scriptdir/../documentation/Docker-container-profiles.md
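Note on the generated file: the yq expression extracts each service name from docker/compose.yaml together with its profiles list and the head comment above the service key, and the sed/column pipeline lines those up as a Markdown table in documentation/Docker-container-profiles.md. A rough sketch of the resulting format (the rows below are illustrative placeholders, not taken from the real compose file):

| container | profiles  | description
|-----------|-----------|----------------------------------------
| webserver |           | nginx frontend serving the application
| cron      | scheduler | periodic batch tests and metrics jobs

Since every row begins with "| ", the new check in bin/check.sh can pass -G'^\| ' to git diff so that only changes to table rows count; the regenerated timestamp line by itself then presumably does not mark the documentation as stale.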
26 changes: 14 additions & 12 deletions checks/caa/parser.py
@@ -77,7 +77,9 @@ def validate_issue_validation_methods(parameter_value: str) -> set[str]:
if validation_method not in ACME_VALIDATION_METHODS and not validation_method.startswith(
ACME_VALIDATION_CUSTOM_PREFIX
):
raise CAAParseError(msg_id="invalid_property_issue_validation_method", context={"value": parameter_value})
raise CAAParseError(
msg_id="invalid-parameter-validation-methods", context={"parameter_value": parameter_value}
)
return validation_methods


@@ -146,22 +148,22 @@ def validate_property_iodef(value: str):
try:
url = urlparse(value)
except ValueError:
raise CAAParseError(msg_id="invalid_property_iodef_value", context={"value": value})
raise CAAParseError(msg_id="invalid-property-iodef-value", context={"property_value": value})
if url.scheme == "https":
# RFC8659 refers to RFC6546, which is unclear on requirements. Let's assume a netloc is needed.
if not url.netloc:
raise CAAParseError(msg_id="invalid_property_iodef_value", context={"value": value})
raise CAAParseError(msg_id="invalid-property-iodef-value", context={"property_value": value})
elif url.scheme == "mailto":
if not validate_email(url.path):
raise CAAParseError(msg_id="invalid_property_iodef_value", context={"value": value})
raise CAAParseError(msg_id="invalid-property-iodef-value", context={"property_value": value})
else:
raise CAAParseError(msg_id="invalid_property_iodef_value", context={"value": value})
raise CAAParseError(msg_id="invalid-property-iodef-value", context={"property_value": value})


def validate_property_contactemail(value: str):
"""Validate contactemail per CAB BR 1.6.3, requiring a single RFC 6532 3.2 address."""
if not validate_email(value):
raise CAAParseError(msg_id="invalid_property_contactemail_value", context={"value": value})
raise CAAParseError(msg_id="invalid-property-contactemail-value", context={"property_value": value})


@load_grammar_rulelist()
@@ -212,7 +214,7 @@ def validate_property_contactphone(value: str):
"""Validate contactphone per CAB SC014, requiring an RFC3966 5.1.4 global number."""
parse_result = PhoneNumberRule("global-number").parse_all(value)
if not parse_result:
raise CAAParseError(msg_id="invalid_property_contactphone_value", context={"value": value})
raise CAAParseError(msg_id="invalid-property-contactphone-value", context={"property_value": value})


@load_grammar_rulelist()
Expand Down Expand Up @@ -241,13 +243,13 @@ def validate_property_issuemail(value: str):
"""Validate issuemail property per RFC9495."""
parse_result = CAAPropertyIssueMailRule("issuemail-value").parse_all(value)
if not parse_result:
raise CAAParseError(msg_id="invalid_property_issuemail_value", context={"value": value})
raise CAAParseError(msg_id="invalid-property-issuemail-value", context={"property_value": value})


def validate_flags(flags: int):
"""Validate the flags per RFC8659 4.1, i.e. only allow 0/128"""
if flags not in [0, 128]:
raise CAAParseError(msg_id="invalid_flags_reserved_bits", context={"value": str(flags)})
raise CAAParseError(msg_id="invalid-flags-reserved-bits", context={"flags": str(flags)})


# https://www.iana.org/assignments/pkix-parameters/pkix-parameters.xhtml#caa-properties
@@ -274,11 +276,11 @@ def validate_caa_record(flags: int, tag: str, value: str) -> None:
try:
validator = CAA_PROPERTY_VALIDATORS[tag.lower()]
if validator is None:
raise CAAParseError(msg_id="invalid_reserved_property", context={"value": tag})
raise CAAParseError(msg_id="invalid-reserved-property", context={"property_tag": tag})
validator(value)
except ParseError as e:
raise CAAParseError(
msg_id="invalid_property_syntax",
msg_id="invalid-property-syntax",
context={
"property_name": tag,
"property_value": value,
@@ -287,4 +289,4 @@ def validate_caa_record(flags: int, tag: str, value: str) -> None:
},
)
except KeyError:
raise CAAParseError(msg_id="invalid_unknown_property", context={"value": tag})
raise CAAParseError(msg_id="invalid-unknown-property", context={"property_tag": tag})
4 changes: 2 additions & 2 deletions checks/caa/retrieval.py
@@ -10,7 +10,7 @@
from checks.resolver import dns_resolve_caa
from checks.tasks.shared import TranslatableTechTableItem

CAA_MSGID_INSUFFICIENT_POLICY = "missing_required_tag"
CAA_MSGID_INSUFFICIENT_POLICY = "missing-required-property-issue"
CAA_TAGS_REQUIRED = {"issue"}
CAA_MAX_RECORDS = 1000

@@ -44,7 +44,7 @@ def __post_init__(self, caa_records: Iterable[CAA]):

missing_tags = CAA_TAGS_REQUIRED - self.caa_tags
for tag in missing_tags:
self.errors.append(TranslatableTechTableItem(CAA_MSGID_INSUFFICIENT_POLICY, {"tag": tag}))
self.errors.append(TranslatableTechTableItem(CAA_MSGID_INSUFFICIENT_POLICY, {"property_tag": tag}))

@property
def score(self) -> int:
6 changes: 3 additions & 3 deletions checks/caa/tests/test_retrieval.py
@@ -21,16 +21,16 @@ def test_caa_evaluation():
evaluation = CAAEvaluation(caa_found=True, canonical_name="example.com", caa_records=caa_records)
assert evaluation.errors == [
TranslatableTechTableItem(
"invalid_property_syntax",
"invalid-property-syntax",
{
"property_name": "issuewild",
"property_value": "\x08",
"invalid_character_position": 0,
"invalid_character": "\x08",
},
),
TranslatableTechTableItem("invalid_unknown_property", {"value": "unknown"}),
TranslatableTechTableItem("missing_required_tag", {"tag": "issue"}),
TranslatableTechTableItem("invalid-unknown-property", {"property_tag": "unknown"}),
TranslatableTechTableItem("missing-required-property-issue", {"property_tag": "issue"}),
]
assert evaluation.caa_records_str == ['0 issuewild "\\008"', '0 unknown ";"']
assert evaluation.caa_tags == {"issuewild", "unknown"}
4 changes: 2 additions & 2 deletions checks/categories.py
@@ -1341,7 +1341,7 @@ def result_bad(self, tech_data: list[dict[str, str]]):

def result_syntax_error(self, tech_data: list[dict[str, str]]):
self._status(STATUS_FAIL)
self.verdict = "detail web tls caa verdict syntax_error"
self.verdict = "detail web tls caa verdict syntax-error"
self.tech_data = self.add_tech_data_translation_root(tech_data) or ""

def result_insufficient(self, tech_data: list[dict[str, str]]):
@@ -1983,7 +1983,7 @@ def result_bad(self, tech_data: list[dict[str, str]]):

def result_syntax_error(self, tech_data: list[dict[str, str]]):
self._status(STATUS_FAIL)
self.verdict = "detail mail tls caa verdict syntax_error"
self.verdict = "detail mail tls caa verdict syntax-error"
self.tech_data = self.add_tech_data_translation_root(tech_data) or ""

def result_insufficient(self, tech_data: list[dict[str, str]]):
179 changes: 179 additions & 0 deletions checks/migrations/0019_hall_of_fame_plus_triggers.py
@@ -0,0 +1,179 @@
# Partly generated by Django 4.2.20 on 2025-05-25 15:24 together with manual RunSQL

from django.db import migrations, models
from django.db.models import Case, F, Q, Value, When
from django.db.models.functions import Greatest
from django.db.models.lookups import GreaterThan
import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations


class Migration(migrations.Migration):
dependencies = [
("checks", "0018_domaintesttls_caa_records"),
]

operations = [
# Note db_index is False on the ForeignKey to prevent extra indices that are not needed
# AutoField has to be the primary key in Django; to work around this, the id column is altered manually with RunSQL below
# see https://github.com/django/django/blob/787f3130f751283140fe2be8188eb5299552232d/django/db/models/fields/__init__.py#L2801
migrations.CreateModel(
name="Fame",
fields=[
("id", models.IntegerField(serialize=False, verbose_name="ID")),
("domain", models.CharField(max_length=255, primary_key=True, serialize=False)),
(
"site_report",
models.ForeignKey(
db_index=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="checks.domaintestreport",
),
),
("site_report_timestamp", models.DateTimeField(null=True)),
(
"mail_report",
models.ForeignKey(
db_index=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="checks.mailtestreport",
),
),
("mail_report_timestamp", models.DateTimeField(null=True)),
],
),
migrations.AddIndex(
model_name="fame",
index=models.Index(
condition=models.Q(site_report_id__isnull=False),
fields=["-site_report_timestamp", "domain", "site_report_id"],
name="checks_fame_sites_idx",
),
),
migrations.AddIndex(
model_name="fame",
index=models.Index(
condition=models.Q(mail_report_id__isnull=False),
fields=["-mail_report_timestamp", "domain", "mail_report_id"],
name="checks_fame_mail_idx",
),
),
migrations.AddIndex(
model_name="fame",
index=models.Index(
models.OrderBy(
Greatest("site_report_timestamp", "mail_report_timestamp"),
descending=True,
),
"domain",
Case(
models.When(
GreaterThan(F("site_report_timestamp"), F("mail_report_timestamp")),
then=Value("s"),
),
default=Value("m"),
output_field=models.CharField(max_length=1),
),
Case(
When(
GreaterThan(F("site_report_timestamp"), F("mail_report_timestamp")),
then="site_report_id",
),
default="mail_report_id",
),
condition=Q(site_report_id__isnull=False) & Q(mail_report_id__isnull=False),
name="checks_fame_champions_idx",
),
),
pgtrigger.migrations.AddTrigger(
model_name="domaintestreport",
trigger=pgtrigger.compiler.Trigger(
name="update_fame_on_site_report",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="""
IF NEW.score IS NULL THEN
-- DO NOTHING
ELSIF NEW.score = 100 THEN
INSERT INTO checks_fame (domain, site_report_id, site_report_timestamp, mail_report_id, mail_report_timestamp)
VALUES (NEW.domain, NEW.id, NEW.timestamp, NULL, NULL)
ON CONFLICT (domain)
DO UPDATE SET site_report_id = NEW.id, site_report_timestamp = NEW.timestamp;
ELSE
MERGE INTO ONLY checks_fame c1
USING checks_fame c2 ON c1.domain = c2.domain AND c1.domain = NEW.domain
WHEN NOT MATCHED THEN
DO NOTHING
WHEN MATCHED AND c1.mail_report_id IS NOT NULL THEN
UPDATE SET site_report_id = NULL, site_report_timestamp = NULL
WHEN MATCHED AND c1.mail_report_id IS NULL THEN
DELETE;
END IF;
RETURN NEW;
""",
hash="b4f792b06123914de71b57669c202a19b04e9e9c",
operation='INSERT OR UPDATE OF "score"',
pgid="pgtrigger_update_fame_on_site_report_e4fdc",
table="checks_domaintestreport",
when="AFTER",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="mailtestreport",
trigger=pgtrigger.compiler.Trigger(
name="update_fame_on_mail_report",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="""
IF NEW.score IS NULL THEN
-- DO NOTHING
ELSIF NEW.score = 100 THEN
INSERT INTO checks_fame (domain, site_report_id, site_report_timestamp, mail_report_id, mail_report_timestamp)
VALUES (NEW.domain, NULL, NULL, NEW.id, NEW.timestamp)
ON CONFLICT (domain)
DO UPDATE SET mail_report_id = NEW.id, mail_report_timestamp = NEW.timestamp;
ELSE
MERGE INTO ONLY checks_fame c1
USING checks_fame c2 ON c1.domain = c2.domain AND c1.domain = NEW.domain
WHEN NOT MATCHED THEN
DO NOTHING
WHEN MATCHED AND c1.site_report_id IS NOT NULL THEN
UPDATE SET mail_report_id = NULL, mail_report_timestamp = NULL
WHEN MATCHED AND c1.site_report_id IS NULL THEN
DELETE;
END IF;
RETURN NEW;
""",
hash="707aefc7a83dd041dd815511f1d1cf7e8f84f944",
operation='INSERT OR UPDATE OF "score"',
pgid="pgtrigger_update_fame_on_mail_report_b3a27",
table="checks_mailtestreport",
when="AFTER",
),
),
),
migrations.RunSQL(
sql=[
'ALTER TABLE "checks_fame" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY;',
"""
WITH
site_fame AS (
SELECT domain, id AS site_report_id, timestamp AS site_report_timestamp FROM (
SELECT domain, score, id, timestamp, rank() OVER (PARTITION BY domain ORDER BY id DESC) FROM checks_domaintestreport
) alias WHERE rank = 1 AND score = 100),
mail_fame AS (
SELECT domain, id AS mail_report_id, timestamp AS mail_report_timestamp FROM (
SELECT domain, score, id, timestamp, rank() OVER (PARTITION BY domain ORDER BY id DESC) FROM checks_mailtestreport
) alias WHERE rank = 1 AND score = 100)
INSERT INTO checks_fame (domain, site_report_id, site_report_timestamp, mail_report_id, mail_report_timestamp)
SELECT * FROM site_fame FULL OUTER JOIN mail_fame USING (domain);
""",
],
reverse_sql=[
'DELETE FROM "checks_fame";',
'ALTER TABLE "checks_fame" ALTER COLUMN "id" DROP IDENTITY;',
],
),
]
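Note: the three partial indexes map onto the expected hall-of-fame reads, with the triggers above keeping checks_fame current as reports are saved: web only, mail only, and "champions" that hold a 100% score for both report types. A minimal sketch of those queries, assuming the Fame model becomes importable as checks.models.Fame (the import path is an assumption; the field and index names come from this migration):

from django.db.models.functions import Greatest

from checks.models import Fame  # assumed import path

# Web hall of fame: domains whose most recent site report scored 100, newest
# first (served by checks_fame_sites_idx).
web_fame = Fame.objects.filter(site_report__isnull=False).order_by("-site_report_timestamp")

# Mail hall of fame, served by checks_fame_mail_idx.
mail_fame = Fame.objects.filter(mail_report__isnull=False).order_by("-mail_report_timestamp")

# Champions: a perfect score for both site and mail, ordered by the most recent
# of the two timestamps (mirrors checks_fame_champions_idx).
champions = Fame.objects.filter(
    site_report__isnull=False, mail_report__isnull=False
).order_by(Greatest("site_report_timestamp", "mail_report_timestamp").desc())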