diff --git a/Taskfile.yml b/Taskfile.yml index 1ae2ad1..b68148b 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -42,9 +42,11 @@ tasks: vars: CLEAN: '{{default "true" .CLEAN}}' SSI: '{{default "false" .SSI}}' + SQL: '{{default "false" .SQL}}' env: # https://docs.gradle.org/current/userguide/build_environment.html#sec:project_properties ORG_GRADLE_PROJECT_useSSI: "{{.SSI}}" + ORG_GRADLE_PROJECT_useSQLStore: "{{.SQL}}" cmds: - gradle {{if eq .CLEAN "true"}}clean{{end}} build @@ -143,6 +145,7 @@ tasks: vars: CLEAN: '{{default "false" .CLEAN}}' SSI: '{{default "false" .SSI}}' + SQL: '{{default "false" .SQL}}' cmds: - docker compose -f {{.ROOT_DIR}}/mock-backend/docker-compose.yml up -d --build --wait - task: build-connector diff --git a/connector/gradle/libs.versions.toml b/connector/gradle/libs.versions.toml index 12e9e54..29bf2e9 100644 --- a/connector/gradle/libs.versions.toml +++ b/connector/gradle/libs.versions.toml @@ -17,6 +17,7 @@ kafkaClients = "3.6.1" json = "20230227" swaggerParser = "2.1.15" slugify = "3.0.4" +postgresql = "42.7.3" [libraries] assertj = { module = "org.assertj:assertj-core", version.ref = "assertj" } @@ -84,6 +85,11 @@ opentelemetry = "io.opentelemetry.javaagent:opentelemetry-javaagent:2.1.0" json = { module = "org.json:json", version.ref = "json" } swaggerParser = { module = "io.swagger.parser.v3:swagger-parser", version.ref = "swaggerParser" } slugify = { module = "com.github.slugify:slugify", version.ref = "slugify" } +edc-sql-pool-apache-commons = { module = "org.eclipse.edc:sql-pool-apache-commons", version.ref = "edc" } +edc-transaction-local = { module = "org.eclipse.edc:transaction-local", version.ref = "edc" } +edc-transaction-datasource-spi = { module = "org.eclipse.edc:transaction-datasource-spi", version.ref = "edc" } +edc-sql-control-plane-sql = { module = "org.eclipse.edc:control-plane-sql", version.ref = "edc" } +postgresql-postgresql = { module = "org.postgresql:postgresql", version.ref = "postgresql" } [plugins] shadow = { 
id = "com.github.johnrengelman.shadow", version = "8.1.1" } diff --git a/connector/openapi-connector/build.gradle.kts b/connector/openapi-connector/build.gradle.kts index 40e8ffe..5e5f38b 100644 --- a/connector/openapi-connector/build.gradle.kts +++ b/connector/openapi-connector/build.gradle.kts @@ -41,6 +41,19 @@ dependencies { } else { implementation(libs.edc.iam.mock) } + + implementation(libs.postgresql.postgresql) + + if ( + project.hasProperty("useSQLStore") && + project.property("useSQLStore").toString().toBoolean() + ) { + // https://github.com/eclipse-edc/Connector/discussions/3242 + implementation(libs.edc.sql.control.plane.sql) + implementation(libs.edc.sql.pool.apache.commons) + implementation(libs.edc.transaction.local) + implementation(libs.edc.transaction.datasource.spi) + } } application { diff --git a/connector/openapi-connector/src/main/java/eu/datacellar/connector/OpenAPICoreExtension.java b/connector/openapi-connector/src/main/java/eu/datacellar/connector/OpenAPICoreExtension.java index b823c1b..cbe82d7 100644 --- a/connector/openapi-connector/src/main/java/eu/datacellar/connector/OpenAPICoreExtension.java +++ b/connector/openapi-connector/src/main/java/eu/datacellar/connector/OpenAPICoreExtension.java @@ -5,10 +5,14 @@ import static org.eclipse.edc.spi.query.Criterion.criterion; import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; import java.net.URL; import java.util.Map; import java.util.UUID; +import javax.sql.DataSource; + import org.eclipse.edc.connector.contract.spi.offer.store.ContractDefinitionStore; import org.eclipse.edc.connector.contract.spi.types.offer.ContractDefinition; import org.eclipse.edc.connector.core.CoreServicesExtension; @@ -35,8 +39,10 @@ import org.eclipse.edc.spi.system.ServiceExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; import org.eclipse.edc.spi.types.domain.asset.Asset; +import org.eclipse.edc.transaction.datasource.spi.DataSourceRegistry; import 
org.json.JSONArray; import org.json.JSONObject; +import org.postgresql.ds.PGSimpleDataSource; import com.github.slugify.Slugify; @@ -60,6 +66,9 @@ public class OpenAPICoreExtension implements ServiceExtension { private static final String PUBLIC_API_URL_KEY = "publicApiUrl"; private static final String DEFAULT_HOSTNAME = "localhost"; private static final String WEB_HTTP_PUBLIC_URL = "web.http.public.url"; + private static final String DATASOURCE_URL = "edc.datasource.default.url"; + private static final String DATASOURCE_USER = "edc.datasource.default.user"; + private static final String DATASOURCE_PASSWORD = "edc.datasource.default.password"; /** * The name of the extension. @@ -106,6 +115,9 @@ public class OpenAPICoreExtension implements ServiceExtension { @Inject private PolicyEngine policyEngine; + @Inject + private DataSourceRegistry dataSourceRegistry; + @Override public String name() { return NAME; @@ -326,6 +338,52 @@ public OpenAPI readOpenAPISchema(Monitor monitor) { return openAPI; } + private void ensureDefaultDataSource(ServiceExtensionContext context) { + Monitor monitor = context.getMonitor(); + + String pgUrl = context.getSetting(DATASOURCE_URL, null); + String pgUser = context.getSetting(DATASOURCE_USER, null); + String pgPass = context.getSetting(DATASOURCE_PASSWORD, null); + + if (pgUrl == null || pgUser == null || pgPass == null) { + monitor.info("Undefined PostgreSQL connection properties: Skipping data source registration"); + return; + } + + DataSource resolvedSource = dataSourceRegistry.resolve(DataSourceRegistry.DEFAULT_DATASOURCE); + + if (resolvedSource != null) { + monitor.info("Data source '%s' is already registered".formatted(DataSourceRegistry.DEFAULT_DATASOURCE)); + return; + } + + URI uri; + + try { + uri = new URI(pgUrl.substring(5)); // remove "jdbc:" + } catch (URISyntaxException e) { + // NOTE(review): log via the EDC Monitor instead of printStackTrace() so the failure reaches the connector logs. + monitor.warning("Invalid PostgreSQL URL: Skipping data source registration", e); + return; + } + + String host = 
uri.getHost(); + int port = uri.getPort(); + String dbName = uri.getPath().substring(1); + + PGSimpleDataSource dataSource = new PGSimpleDataSource(); + dataSource.setServerNames(new String[] { host }); + dataSource.setPortNumbers(new int[] { port }); + dataSource.setDatabaseName(dbName); + dataSource.setUser(pgUser); + dataSource.setPassword(pgPass); + + monitor.info("Manually registering data source '%s' to '%s:%s/%s' with username %s" + .formatted(DataSourceRegistry.DEFAULT_DATASOURCE, host, port, dbName, pgUser)); + + dataSourceRegistry.register(DataSourceRegistry.DEFAULT_DATASOURCE, dataSource); + } + @Override public void initialize(ServiceExtensionContext context) { Monitor monitor = context.getMonitor(); @@ -333,6 +391,12 @@ public void initialize(ServiceExtensionContext context) { DataPlaneInstance dataPlane = buildDataPlaneInstance(context); dataPlaneStore.create(dataPlane); + // ToDo: Review this + // Data sources should be registered automatically, but I keep getting this: + // "java.lang.NullPointerException: DataSource could not be resolved" + // So I'm registering the default data source manually for now. 
+ ensureDefaultDataSource(context); + openapiUrl = context.getSetting(OPENAPI_URL, null); if (openapiUrl != null) { diff --git a/dev-config/dev-consumer.properties b/dev-config/dev-consumer.properties index 8275cd8..ab05d81 100644 --- a/dev-config/dev-consumer.properties +++ b/dev-config/dev-consumer.properties @@ -22,4 +22,7 @@ eu.datacellar.wallet.url=http://host.docker.internal:7001 eu.datacellar.wallet.email=consumer@ctic.es eu.datacellar.wallet.password=consumer eu.datacellar.trust.did=did:web:gaiax.cticpoc.com:anchor -eu.datacellar.uniresolver.url=https://uniresolver.test.ctic.es/1.0/identifiers \ No newline at end of file +eu.datacellar.uniresolver.url=https://uniresolver.test.ctic.es/1.0/identifiers +edc.datasource.default.url=jdbc:postgresql://host.docker.internal:25432/connector +edc.datasource.default.user=postgres +edc.datasource.default.password=postgres \ No newline at end of file diff --git a/dev-config/dev-provider.properties b/dev-config/dev-provider.properties index d3fdf2e..37281bd 100644 --- a/dev-config/dev-provider.properties +++ b/dev-config/dev-provider.properties @@ -23,4 +23,7 @@ eu.datacellar.wallet.url=http://host.docker.internal:7061 eu.datacellar.wallet.email=provider@ctic.es eu.datacellar.wallet.password=provider eu.datacellar.trust.did=did:web:gaiax.cticpoc.com:anchor -eu.datacellar.uniresolver.url=https://uniresolver.test.ctic.es/1.0/identifiers \ No newline at end of file +eu.datacellar.uniresolver.url=https://uniresolver.test.ctic.es/1.0/identifiers +edc.datasource.default.url=jdbc:postgresql://host.docker.internal:15432/connector +edc.datasource.default.user=postgres +edc.datasource.default.password=postgres \ No newline at end of file diff --git a/dev-config/sql-store-schemas/README.md b/dev-config/sql-store-schemas/README.md new file mode 100644 index 0000000..166b2ac --- /dev/null +++ b/dev-config/sql-store-schemas/README.md @@ -0,0 +1,3 @@ +# Connector SQL store schemas + +SQL store schemas downloaded from the 
[eclipse-edc/Connector repository](https://github.com/eclipse-edc/Connector/tree/v0.4.1/extensions/control-plane/store/sql). diff --git a/dev-config/sql-store-schemas/asset.sql b/dev-config/sql-store-schemas/asset.sql new file mode 100644 index 0000000..91ac2da --- /dev/null +++ b/dev-config/sql-store-schemas/asset.sql @@ -0,0 +1,30 @@ +-- +-- Copyright (c) 2022 - 2023 Daimler TSS GmbH +-- +-- This program and the accompanying materials are made available under the +-- terms of the Apache License, Version 2.0 which is available at +-- https://www.apache.org/licenses/LICENSE-2.0 +-- +-- SPDX-License-Identifier: Apache-2.0 +-- +-- Contributors: +-- Daimler TSS GmbH - Initial SQL Query +-- Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - improvements +-- + +-- THIS SCHEMA HAS BEEN WRITTEN AND TESTED ONLY FOR POSTGRES + +-- table: edc_asset +CREATE TABLE IF NOT EXISTS edc_asset +( + asset_id VARCHAR NOT NULL, + created_at BIGINT NOT NULL, + properties JSON DEFAULT '{}', + private_properties JSON DEFAULT '{}', + data_address JSON DEFAULT '{}', + PRIMARY KEY (asset_id) +); + +COMMENT ON COLUMN edc_asset.properties IS 'Asset properties serialized as JSON'; +COMMENT ON COLUMN edc_asset.private_properties IS 'Asset private properties serialized as JSON'; +COMMENT ON COLUMN edc_asset.data_address IS 'Asset DataAddress serialized as JSON'; \ No newline at end of file diff --git a/dev-config/sql-store-schemas/contract-definition.sql b/dev-config/sql-store-schemas/contract-definition.sql new file mode 100644 index 0000000..ad29cfe --- /dev/null +++ b/dev-config/sql-store-schemas/contract-definition.sql @@ -0,0 +1,27 @@ +-- +-- Copyright (c) 2022 Daimler TSS GmbH +-- +-- This program and the accompanying materials are made available under the +-- terms of the Apache License, Version 2.0 which is available at +-- https://www.apache.org/licenses/LICENSE-2.0 +-- +-- SPDX-License-Identifier: Apache-2.0 +-- +-- Contributors: +-- Daimler TSS GmbH - Initial SQL Query +-- 
Microsoft Corporation - refactoring +-- SAP SE - add private properties to contract definition +-- + +-- table: edc_contract_definitions +-- only intended for and tested with H2 and Postgres! +CREATE TABLE IF NOT EXISTS edc_contract_definitions +( + created_at BIGINT NOT NULL, + contract_definition_id VARCHAR NOT NULL, + access_policy_id VARCHAR NOT NULL, + contract_policy_id VARCHAR NOT NULL, + assets_selector JSON NOT NULL, + private_properties JSON, + PRIMARY KEY (contract_definition_id) +); \ No newline at end of file diff --git a/dev-config/sql-store-schemas/contract-negotiation.sql b/dev-config/sql-store-schemas/contract-negotiation.sql new file mode 100644 index 0000000..f8920c1 --- /dev/null +++ b/dev-config/sql-store-schemas/contract-negotiation.sql @@ -0,0 +1,82 @@ +-- Statements are designed for and tested with Postgres only! + +CREATE TABLE IF NOT EXISTS edc_lease +( + leased_by VARCHAR NOT NULL, + leased_at BIGINT, + lease_duration INTEGER DEFAULT 60000 NOT NULL, + lease_id VARCHAR NOT NULL + CONSTRAINT lease_pk + PRIMARY KEY +); + +COMMENT ON COLUMN edc_lease.leased_at IS 'posix timestamp of lease'; + +COMMENT ON COLUMN edc_lease.lease_duration IS 'duration of lease in milliseconds'; + + +CREATE UNIQUE INDEX IF NOT EXISTS lease_lease_id_uindex + ON edc_lease (lease_id); + + + +CREATE TABLE IF NOT EXISTS edc_contract_agreement +( + agr_id VARCHAR NOT NULL + CONSTRAINT contract_agreement_pk + PRIMARY KEY, + provider_agent_id VARCHAR, + consumer_agent_id VARCHAR, + signing_date BIGINT, + start_date BIGINT, + end_date INTEGER, + asset_id VARCHAR NOT NULL, + policy JSON +); + + +CREATE TABLE IF NOT EXISTS edc_contract_negotiation +( + id VARCHAR NOT NULL + CONSTRAINT contract_negotiation_pk + PRIMARY KEY, + created_at BIGINT NOT NULL, + updated_at BIGINT NOT NULL, + correlation_id VARCHAR, + counterparty_id VARCHAR NOT NULL, + counterparty_address VARCHAR NOT NULL, + protocol VARCHAR NOT NULL, + type VARCHAR NOT NULL, + state INTEGER DEFAULT 0 NOT NULL, + 
state_count INTEGER DEFAULT 0, + state_timestamp BIGINT, + error_detail VARCHAR, + agreement_id VARCHAR + CONSTRAINT contract_negotiation_contract_agreement_id_fk + REFERENCES edc_contract_agreement, + contract_offers JSON, + callback_addresses JSON, + trace_context JSON, + pending BOOLEAN DEFAULT FALSE, + lease_id VARCHAR + CONSTRAINT contract_negotiation_lease_lease_id_fk + REFERENCES edc_lease + ON DELETE SET NULL, + CONSTRAINT provider_correlation_id CHECK (type = '0' OR correlation_id IS NOT NULL) +); + +COMMENT ON COLUMN edc_contract_negotiation.agreement_id IS 'ContractAgreement serialized as JSON'; + +COMMENT ON COLUMN edc_contract_negotiation.contract_offers IS 'List serialized as JSON'; + +COMMENT ON COLUMN edc_contract_negotiation.trace_context IS 'Map serialized as JSON'; + + +CREATE INDEX IF NOT EXISTS contract_negotiation_correlationid_index + ON edc_contract_negotiation (correlation_id); + +CREATE UNIQUE INDEX IF NOT EXISTS contract_negotiation_id_uindex + ON edc_contract_negotiation (id); + +CREATE UNIQUE INDEX IF NOT EXISTS contract_agreement_id_uindex + ON edc_contract_agreement (agr_id); \ No newline at end of file diff --git a/dev-config/sql-store-schemas/policy-definition.sql b/dev-config/sql-store-schemas/policy-definition.sql new file mode 100644 index 0000000..29143bf --- /dev/null +++ b/dev-config/sql-store-schemas/policy-definition.sql @@ -0,0 +1,40 @@ +-- +-- Copyright (c) 2022 ZF Friedrichshafen AG +-- +-- This program and the accompanying materials are made available under the +-- terms of the Apache License, Version 2.0 which is available at +-- https://www.apache.org/licenses/LICENSE-2.0 +-- +-- SPDX-License-Identifier: Apache-2.0 +-- +-- Contributors: +-- ZF Friedrichshafen AG - Initial SQL Query +-- + +-- Statements are designed for and tested with Postgres only! 
+ +-- table: edc_policydefinitions +CREATE TABLE IF NOT EXISTS edc_policydefinitions +( + policy_id VARCHAR NOT NULL, + created_at BIGINT NOT NULL, + permissions JSON, + prohibitions JSON, + duties JSON, + extensible_properties JSON, + inherits_from VARCHAR, + assigner VARCHAR, + assignee VARCHAR, + target VARCHAR, + policy_type VARCHAR NOT NULL, + PRIMARY KEY (policy_id) +); + +COMMENT ON COLUMN edc_policydefinitions.permissions IS 'Java List serialized as JSON'; +COMMENT ON COLUMN edc_policydefinitions.prohibitions IS 'Java List serialized as JSON'; +COMMENT ON COLUMN edc_policydefinitions.duties IS 'Java List serialized as JSON'; +COMMENT ON COLUMN edc_policydefinitions.extensible_properties IS 'Java Map serialized as JSON'; +COMMENT ON COLUMN edc_policydefinitions.policy_type IS 'Java PolicyType serialized as JSON'; + +CREATE UNIQUE INDEX IF NOT EXISTS edc_policydefinitions_id_uindex + ON edc_policydefinitions (policy_id); \ No newline at end of file diff --git a/dev-config/sql-store-schemas/transfer-process.sql b/dev-config/sql-store-schemas/transfer-process.sql new file mode 100644 index 0000000..8e3ba65 --- /dev/null +++ b/dev-config/sql-store-schemas/transfer-process.sql @@ -0,0 +1,81 @@ +-- Statements are designed for and tested with Postgres only! 
+ +CREATE TABLE IF NOT EXISTS edc_lease +( + leased_by VARCHAR NOT NULL, + leased_at BIGINT, + lease_duration INTEGER NOT NULL, + lease_id VARCHAR NOT NULL + CONSTRAINT lease_pk + PRIMARY KEY +); + +COMMENT ON COLUMN edc_lease.leased_at IS 'posix timestamp of lease'; + +COMMENT ON COLUMN edc_lease.lease_duration IS 'duration of lease in milliseconds'; + +CREATE TABLE IF NOT EXISTS edc_transfer_process +( + transferprocess_id VARCHAR NOT NULL + CONSTRAINT transfer_process_pk + PRIMARY KEY, + type VARCHAR NOT NULL, + state INTEGER NOT NULL, + state_count INTEGER DEFAULT 0 NOT NULL, + state_time_stamp BIGINT, + created_at BIGINT NOT NULL, + updated_at BIGINT NOT NULL, + trace_context JSON, + error_detail VARCHAR, + resource_manifest JSON, + provisioned_resource_set JSON, + content_data_address JSON, + deprovisioned_resources JSON, + private_properties JSON, + callback_addresses JSON, + pending BOOLEAN DEFAULT FALSE, + lease_id VARCHAR + CONSTRAINT transfer_process_lease_lease_id_fk + REFERENCES edc_lease + ON DELETE SET NULL +); + +COMMENT ON COLUMN edc_transfer_process.trace_context IS 'Java Map serialized as JSON'; + +COMMENT ON COLUMN edc_transfer_process.resource_manifest IS 'java ResourceManifest serialized as JSON'; + +COMMENT ON COLUMN edc_transfer_process.provisioned_resource_set IS 'ProvisionedResourceSet serialized as JSON'; + +COMMENT ON COLUMN edc_transfer_process.content_data_address IS 'DataAddress serialized as JSON'; + +COMMENT ON COLUMN edc_transfer_process.deprovisioned_resources IS 'List of deprovisioned resources, serialized as JSON'; + + +CREATE UNIQUE INDEX IF NOT EXISTS transfer_process_id_uindex + ON edc_transfer_process (transferprocess_id); + +CREATE TABLE IF NOT EXISTS edc_data_request +( + datarequest_id VARCHAR NOT NULL + CONSTRAINT data_request_pk + PRIMARY KEY, + process_id VARCHAR NOT NULL, + connector_address VARCHAR NOT NULL, + protocol VARCHAR NOT NULL, + connector_id VARCHAR, + asset_id VARCHAR NOT NULL, + contract_id VARCHAR NOT 
NULL, + data_destination JSON NOT NULL, + transfer_process_id VARCHAR NOT NULL + CONSTRAINT data_request_transfer_process_id_fk + REFERENCES edc_transfer_process + ON UPDATE RESTRICT ON DELETE CASCADE +); + +COMMENT ON COLUMN edc_data_request.data_destination IS 'DataAddress serialized as JSON'; + +CREATE UNIQUE INDEX IF NOT EXISTS data_request_id_uindex + ON edc_data_request (datarequest_id); + +CREATE UNIQUE INDEX IF NOT EXISTS lease_lease_id_uindex + ON edc_lease (lease_id); \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 0b40058..44afc34 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -141,7 +141,47 @@ services: - ./dev-config/waltid/wallet-provider:/waltid-wallet-api/config extra_hosts: - host.docker.internal:host-gateway + consumer_postgres: + image: postgres:14 + container_name: consumer_postgres + restart: on-failure + ports: + - 25432:5432 + environment: + POSTGRES_DB: connector + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + volumes: + - consumer_postgres_data:/var/lib/postgresql/data + - ./dev-config/sql-store-schemas/:/docker-entrypoint-initdb.d/ + healthcheck: + test: [CMD-SHELL, pg_isready -U postgres] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + provider_postgres: + image: postgres:14 + container_name: provider_postgres + restart: on-failure + ports: + - 15432:5432 + environment: + POSTGRES_DB: connector + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + volumes: + - provider_postgres_data:/var/lib/postgresql/data + - ./dev-config/sql-store-schemas/:/docker-entrypoint-initdb.d/ + healthcheck: + test: [CMD-SHELL, pg_isready -U postgres] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s volumes: consumer_wallet_data: {} anchor_wallet_data: {} provider_wallet_data: {} + consumer_postgres_data: {} + provider_postgres_data: {}