Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
c03236a
docs: ADR for re-processing of documents
ctron Aug 5, 2025
c6826e2
chore: initial PoC impl
ctron Aug 6, 2025
1274bf9
docs: refine option one as the preferred one
ctron Aug 7, 2025
c9ee490
refactor: pull out common code for creating a storage backend
ctron Sep 9, 2025
08cb6a0
feat: allow running data migrations as part of migrations
ctron Sep 9, 2025
b3b6c6a
chore: make concurrency configurable
ctron Sep 10, 2025
4ad816e
feat: add a way to run data migrations individually
ctron Sep 12, 2025
3ca0669
feat: add way to run data migrations from main binary
ctron Sep 19, 2025
5578da3
feat: allow spreading load across runners
ctron Sep 22, 2025
42ceceb
refactor: extract method
ctron Oct 1, 2025
116cc28
chore: test example
ctron Oct 7, 2025
cc95b67
chore: show number of processed documents
ctron Oct 9, 2025
d92744d
chore: add advisory to example
ctron Oct 10, 2025
26d7d87
feat: ingest scores
ctron Oct 10, 2025
1cc411f
chore: continue work on PoC
ctron Oct 14, 2025
70c1d46
chore: add foreign key
ctron Oct 14, 2025
cfe1450
chore: implement for csaf
ctron Oct 15, 2025
eaf04f1
feat: use cvss library to parse cve scores
dejanb Oct 14, 2025
3fe0a80
chore: mop up some things
ctron Oct 15, 2025
4e3c969
chore: refactor code about ingesting new scores
ctron Oct 15, 2025
bc33b5c
chore: also use iden type for dropping
ctron Oct 16, 2025
91abee4
chore: align column types
ctron Oct 16, 2025
232d96c
impove cvss mapping logic
dejanb Oct 16, 2025
70b8c4a
chore: implement for csaf with v2 score
ctron Oct 20, 2025
dfceb1b
test: pass on storage from tests to migrator
ctron Oct 24, 2025
b7a54b7
fix: skip inserting when empty, leads to faulty SQL
ctron Oct 24, 2025
3b83e5d
refactor: rename test so drop the "example"
ctron Oct 24, 2025
5a13785
chore: format
ctron Oct 24, 2025
b13c75a
test: add test for sbom
ctron Oct 27, 2025
6c05d36
chore: bring back sbom properties
ctron Oct 27, 2025
29aa6e0
docs: brush up a bit
ctron Oct 27, 2025
9cf4710
test: sort order, to make test stable
ctron Oct 28, 2025
844476f
chore: add additional SBOM column
ctron Oct 28, 2025
a9e3039
chore: update example
ctron Oct 29, 2025
26b9530
chore: adapt CSAF CVSS v3 handling to use JSON deserialization
dejanb Oct 29, 2025
6745754
chore: only create the type if it doesn't exist
ctron Oct 29, 2025
5ca5e41
test: the cvss score parsing should no longer fail
ctron Oct 29, 2025
2355f9e
refactor: pull out check if the migration should be ignored
ctron Oct 30, 2025
5729748
docs: document the test case a bit
ctron Oct 30, 2025
3903186
test: add a test for re-running m0002010
ctron Oct 31, 2025
200b210
docs: improve on documentation
ctron Nov 10, 2025
d5e4619
refactor: make common function
ctron Nov 10, 2025
ed727dd
refactor: simplify things a bit
ctron Nov 10, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
.idea
.DS_Store
/data
.trustify
/target
/.dockerignore
/Containerfile
42 changes: 37 additions & 5 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ csaf = { version = "0.5.0", default-features = false }
csaf-walker = { version = "0.14.1", default-features = false }
csv = "1.3.0"
cve = "0.4.0"
cvss = { git = "https://github.com/dejanb/cvss" }
cvss-old = { package = "cvss", version = "2" }
deepsize = "0.2.0"
fixedbitset = "0.5.7"
flate2 = "1.0.35"
Expand All @@ -77,6 +79,7 @@ http = "1"
human-date-parser = "0.3"
humantime = "2"
humantime-serde = "1"
indicatif = "0.18.0"
itertools = "0.14"
jsn = "0.14"
json-merge-patch = "0.0.1"
Expand Down Expand Up @@ -203,7 +206,7 @@ postgresql_commands = { version = "0.20.0", default-features = false, features =
# required due to https://github.com/KenDJohnson/cpe-rs/pull/15
#cpe = { git = "https://github.com/ctron/cpe-rs", rev = "c3c05e637f6eff7dd4933c2f56d070ee2ddfb44b" }
# required due to https://github.com/voteblake/csaf-rs/pull/29
csaf = { git = "https://github.com/trustification/csaf-rs" }
csaf = { git = "https://github.com/trustification/csaf-rs", branch = "cvss" }
# required due to https://github.com/gcmurphy/osv/pull/58
#osv = { git = "https://github.com/ctron/osv", branch = "feature/drop_deps_1" }

Expand Down
22 changes: 22 additions & 0 deletions Containerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
FROM registry.access.redhat.com/ubi9/ubi:latest AS builder

RUN dnf install --setop install_weak_deps=false --nodocs -y git python gcc g++ cmake ninja-build openssl-devel xz

RUN curl https://sh.rustup.rs -sSf | bash -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"

RUN mkdir /build

COPY . /build

WORKDIR /build

RUN ls

RUN rm rust-toolchain.toml

RUN cargo build --release

FROM registry.access.redhat.com/ubi9/ubi-minimal:latest

COPY --from=builder /build/target/release/trustd /usr/local/bin/
4 changes: 4 additions & 0 deletions TODO.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# ToDo

* [ ] Allow skipping the data part of the migration
* [x] Allow concurrent instances (x of y)
3 changes: 2 additions & 1 deletion common/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ deepsize = { workspace = true }
hex = { workspace = true }
hide = { workspace = true }
human-date-parser = { workspace = true }
humantime = { workspace = true }
itertools = { workspace = true }
lenient_semver = { workspace = true }
log = { workspace = true }
Expand All @@ -32,6 +33,7 @@ sea-orm = { workspace = true, features = ["sea-query-binder", "sqlx-postgres", "
sea-orm-migration = { workspace = true }
sea-query = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde-cyclonedx = { workspace = true }
serde_json = { workspace = true }
spdx-expression = { workspace = true }
spdx-rs = { workspace = true }
Expand All @@ -45,7 +47,6 @@ urlencoding = { workspace = true }
utoipa = { workspace = true, features = ["url"] }
uuid = { workspace = true, features = ["v5", "serde"] }
walker-common = { workspace = true, features = ["bzip2", "liblzma", "flate2"] }
humantime = { workspace = true }

[dev-dependencies]
chrono = { workspace = true }
Expand Down
5 changes: 5 additions & 0 deletions common/db/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ pub mod embedded;

use anyhow::{Context, anyhow, ensure};
use migration::Migrator;
use migration::data::Runner;
use postgresql_commands::{CommandBuilder, psql::PsqlBuilder};
use sea_orm::{ConnectionTrait, Statement};
use sea_orm_migration::prelude::MigratorTrait;
Expand Down Expand Up @@ -121,4 +122,8 @@ impl<'a> Database<'a> {

Ok(db)
}

pub async fn data_migrate(&self, runner: Runner) -> Result<(), anyhow::Error> {
runner.run::<Migrator>().await
}
}
27 changes: 27 additions & 0 deletions common/src/advisory/cyclonedx.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
use serde_cyclonedx::cyclonedx::v_1_6::CycloneDx;
use std::collections::HashMap;

/// extract CycloneDX SBOM general purpose properties
pub fn extract_properties(sbom: &CycloneDx) -> HashMap<String, Option<String>> {
sbom.properties
.iter()
.flatten()
.map(|e| (e.name.clone(), e.value.clone()))
.collect()
}

/// extract CycloneDX SBOM general purpose properties, convert into [`serde_json::Value`]
pub fn extract_properties_json(sbom: &CycloneDx) -> serde_json::Value {
serde_json::Value::Object(
extract_properties(sbom)
.into_iter()
.map(|(k, v)| {
(
k,
v.map(serde_json::Value::String)
.unwrap_or(serde_json::Value::Null),
)
})
.collect(),
)
}
2 changes: 2 additions & 0 deletions common/src/advisory/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
pub mod cyclonedx;

use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use utoipa::ToSchema;
Expand Down
37 changes: 37 additions & 0 deletions common/src/db/create.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
use sea_orm::{ConnectionTrait, DbErr};
use sea_orm_migration::SchemaManager;
use sea_query::{IntoIden, extension::postgres::Type};

/// create a type, if it not already exists
///
/// This is required as Postgres doesn't support `CREATE TYPE IF NOT EXISTS`
pub async fn create_enum_if_not_exists<T, I>(
manager: &SchemaManager<'_>,
name: impl IntoIden + Clone,
values: I,
) -> Result<(), DbErr>
where
T: IntoIden,
I: IntoIterator<Item = T>,
{
let builder = manager.get_connection().get_database_backend();
let r#type = name.clone().into_iden();
let stmt = builder.build(Type::create().as_enum(name).values(values));
let stmt = format!(
r#"
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_type WHERE typname = '{name}'
) THEN
{stmt};
END IF;
END$$;
"#,
name = r#type.to_string()
);

manager.get_connection().execute_unprepared(&stmt).await?;

Ok(())
}
7 changes: 7 additions & 0 deletions common/src/db/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@ pub mod limiter;
pub mod multi_model;
pub mod query;

mod create;
mod func;

pub use create::*;
pub use func::*;

use anyhow::Context;
Expand Down Expand Up @@ -103,6 +106,10 @@ impl Database {
pub fn name(&self) -> &str {
&self.name
}

pub fn into_connection(self) -> DatabaseConnection {
self.db
}
}

impl Deref for Database {
Expand Down
80 changes: 80 additions & 0 deletions data-migration.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
kind: Job
apiVersion: batch/v1
metadata:
name: data-migration-test
spec:
completions: 4
completionMode: Indexed
parallelism: 4 # same as completions
template:
spec:
restartPolicy: OnFailure
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchExpressions:
- key: "kubernetes.io/arch"
operator: In
values: ["amd64"]
containers:
- name: run
image: quay.io/ctrontesting/trustd:latest
imagePullPolicy: Always
command:
- /usr/local/bin/trustd
- db
- data
- m0002010_add_advisory_scores # name of the migration
env:
- name: MIGRATION_DATA_CONCURRENT
value: "5" # in-process parallelism
- name: MIGRATION_DATA_TOTAL_RUNNER
value: "4" # same as completions
- name: MIGRATION_DATA_CURRENT_RUNNER
valueFrom:
fieldRef:
fieldPath: metadata.annotations['batch.kubernetes.io/job-completion-index']

- name: TRUSTD_STORAGE_STRATEGY
value: s3
- name: TRUSTD_S3_ACCESS_KEY
valueFrom:
secretKeyRef:
name: storage-credentials
key: aws_access_key_id
- name: TRUSTD_S3_SECRET_KEY
valueFrom:
secretKeyRef:
name: storage-credentials
key: aws_secret_access_key
- name: TRUSTD_S3_REGION
valueFrom:
configMapKeyRef:
name: aws-storage
key: region
- name: TRUSTD_S3_BUCKET
value: trustify-default

- name: TRUSTD_DB_NAME
value: trustify_default
- name: TRUSTD_DB_USER
valueFrom:
secretKeyRef:
name: postgresql
key: username
- name: TRUSTD_DB_PASSWORD
valueFrom:
secretKeyRef:
name: postgresql
key: password
- name: TRUSTD_DB_HOST
valueFrom:
secretKeyRef:
name: postgresql
key: host
- name: TRUSTD_DB_PORT
value: "5432"

- name: RUST_LOG
value: info
Loading