From 6f51724bdce90548f2fa8e598e66d1266b14a851 Mon Sep 17 00:00:00 2001 From: Ivan Herman Date: Thu, 7 Mar 2024 16:47:49 +0100 Subject: [PATCH] Fully working, compiled, documented --- README.md | 14 +- dist/index.d.ts | 214 ++--- dist/index.js | 619 +++++---------- dist/lib/crypto_utils.d.ts | 74 ++ dist/lib/crypto_utils.js | 274 +++++++ dist/lib/proof_utils.d.ts | 61 ++ dist/lib/proof_utils.js | 209 +++++ dist/lib/{errors.d.ts => types.d.ts} | 36 +- dist/lib/{errors.js => types.js} | 25 +- dist/lib/utils.d.ts | 38 +- dist/lib/utils.js | 73 +- docs/assets/navigation.js | 2 +- docs/assets/search.js | 2 +- docs/classes/index.DI_ECDSA.html | 62 -- ...ib_errors.Invalid_Verification_Method.html | 11 - .../lib_errors.Malformed_Proof_Error.html | 11 - .../lib_errors.Mismatched_Proof_Purpose.html | 11 - docs/classes/lib_errors.ProblemDetail.html | 11 - .../lib_errors.Proof_Generation_Error.html | 11 - ...lib_types.Invalid_Verification_Method.html | 11 + .../lib_types.Malformed_Proof_Error.html | 11 + .../lib_types.Mismatched_Proof_Purpose.html | 11 + docs/classes/lib_types.ProblemDetail.html | 11 + .../lib_types.Proof_Generation_Error.html | 11 + .../classes/lib_types.Unclassified_Error.html | 11 + docs/classes/lib_utils.DatasetMap.html | 6 +- docs/enums/index.Confidentiality.html | 4 - docs/enums/lib_types.Cryptosuites.html | 4 + docs/functions/index.embedProofGraph.html | 6 + docs/functions/index.generateProofGraph.html | 6 + .../index.verifyEmbeddedProofGraph.html | 17 + docs/functions/index.verifyProofGraph.html | 13 + .../lib_crypto_utils.cryptosuiteId.html | 3 + .../lib_crypto_utils.generateKey.html | 4 + docs/functions/lib_crypto_utils.sign.html | 6 + docs/functions/lib_crypto_utils.verify.html | 4 + .../lib_proof_utils.generateAProofGraph.html | 5 + .../functions/lib_proof_utils.rdf_prefix.html | 1 + .../functions/lib_proof_utils.sec_prefix.html | 4 + .../lib_proof_utils.verifyAProofGraph.html | 15 + .../functions/lib_proof_utils.xsd_prefix.html | 1 + .../lib_utils.arrayBufferToBase64Url.html | 3 - .../lib_utils.base64UrlToArrayBuffer.html | 3 - .../lib_utils.calculateDatasetHash.html | 2 +- docs/functions/lib_utils.convertToStore.html | 2 +- docs/functions/lib_utils.createPrefix.html | 2 +- docs/functions/lib_utils.isDatasetCore.html | 2 +- docs/functions/lib_utils.isKeyData.html | 2 + .../lib_utils.textToArrayBuffer.html | 2 - docs/functions/lib_utils.write_quads.html | 2 +- docs/hierarchy.html | 2 +- docs/index.html | 23 +- docs/interfaces/index.KeyPair.html | 9 - docs/interfaces/index.VerificationResult.html | 5 - .../lib_crypto_utils.KeyDetails.html | 5 + docs/interfaces/lib_types.Errors.html | 3 + docs/interfaces/lib_types.KeyData.html | 7 + docs/interfaces/lib_types.KeyMetadata.html | 5 + docs/interfaces/lib_types.KeyPair.html | 3 + .../lib_types.VerificationResult.html | 5 + docs/interfaces/lib_utils.MapContent.html | 4 +- docs/modules/index.html | 18 +- docs/modules/lib_crypto_utils.html | 17 + docs/modules/lib_errors.html | 6 - docs/modules/lib_proof_utils.html | 24 + docs/modules/lib_types.html | 14 + docs/modules/lib_utils.html | 13 +- docs/types/lib_crypto_utils.Alg.html | 2 + docs/types/lib_crypto_utils.Crv.html | 2 + docs/types/lib_crypto_utils.Hsh.html | 2 + docs/types/lib_crypto_utils.Kty.html | 2 + docs/variables/lib_proof_utils.rdf_type.html | 1 + .../lib_proof_utils.sec_assertionMethod.html | 1 + ..._proof_utils.sec_authenticationMethod.html | 1 + .../lib_proof_utils.sec_created.html | 1 + .../lib_proof_utils.sec_di_proof.html | 1 + .../lib_proof_utils.sec_expires.html | 
1 + docs/variables/lib_proof_utils.sec_proof.html | 1 + .../lib_proof_utils.sec_proofPurpose.html | 1 + .../lib_proof_utils.sec_proofValue.html | 1 + .../lib_proof_utils.sec_publicKeyJwk.html | 1 + .../lib_proof_utils.sec_revoked.html | 1 + ...ib_proof_utils.sec_verificationMethod.html | 1 + .../lib_proof_utils.xsd_datetime.html | 1 + examples/small_with_proofs.ttl | 60 +- index.ts | 730 +++++------------- lib/common.ts | 34 - lib/crypto_utils.ts | 96 ++- lib/{proofs.ts => proof_utils.ts} | 138 ++-- lib/{errors.ts => types.ts} | 51 +- lib/utils.ts | 83 +- testing/keys.json | 52 +- testing/keys_alt.json | 81 ++ testing/run/keys.ts | 4 +- testing/run/main.ts | 70 +- testing/tests/small.ttl | 2 + 96 files changed, 1870 insertions(+), 1658 deletions(-) create mode 100644 dist/lib/crypto_utils.d.ts create mode 100644 dist/lib/crypto_utils.js create mode 100644 dist/lib/proof_utils.d.ts create mode 100644 dist/lib/proof_utils.js rename dist/lib/{errors.d.ts => types.d.ts} (54%) rename dist/lib/{errors.js => types.js} (68%) delete mode 100644 docs/classes/index.DI_ECDSA.html delete mode 100644 docs/classes/lib_errors.Invalid_Verification_Method.html delete mode 100644 docs/classes/lib_errors.Malformed_Proof_Error.html delete mode 100644 docs/classes/lib_errors.Mismatched_Proof_Purpose.html delete mode 100644 docs/classes/lib_errors.ProblemDetail.html delete mode 100644 docs/classes/lib_errors.Proof_Generation_Error.html create mode 100644 docs/classes/lib_types.Invalid_Verification_Method.html create mode 100644 docs/classes/lib_types.Malformed_Proof_Error.html create mode 100644 docs/classes/lib_types.Mismatched_Proof_Purpose.html create mode 100644 docs/classes/lib_types.ProblemDetail.html create mode 100644 docs/classes/lib_types.Proof_Generation_Error.html create mode 100644 docs/classes/lib_types.Unclassified_Error.html delete mode 100644 docs/enums/index.Confidentiality.html create mode 100644 docs/enums/lib_types.Cryptosuites.html create mode 100644 docs/functions/index.embedProofGraph.html create mode 100644 docs/functions/index.generateProofGraph.html create mode 100644 docs/functions/index.verifyEmbeddedProofGraph.html create mode 100644 docs/functions/index.verifyProofGraph.html create mode 100644 docs/functions/lib_crypto_utils.cryptosuiteId.html create mode 100644 docs/functions/lib_crypto_utils.generateKey.html create mode 100644 docs/functions/lib_crypto_utils.sign.html create mode 100644 docs/functions/lib_crypto_utils.verify.html create mode 100644 docs/functions/lib_proof_utils.generateAProofGraph.html create mode 100644 docs/functions/lib_proof_utils.rdf_prefix.html create mode 100644 docs/functions/lib_proof_utils.sec_prefix.html create mode 100644 docs/functions/lib_proof_utils.verifyAProofGraph.html create mode 100644 docs/functions/lib_proof_utils.xsd_prefix.html delete mode 100644 docs/functions/lib_utils.arrayBufferToBase64Url.html delete mode 100644 docs/functions/lib_utils.base64UrlToArrayBuffer.html create mode 100644 docs/functions/lib_utils.isKeyData.html delete mode 100644 docs/functions/lib_utils.textToArrayBuffer.html delete mode 100644 docs/interfaces/index.KeyPair.html delete mode 100644 docs/interfaces/index.VerificationResult.html create mode 100644 docs/interfaces/lib_crypto_utils.KeyDetails.html create mode 100644 docs/interfaces/lib_types.Errors.html create mode 100644 docs/interfaces/lib_types.KeyData.html create mode 100644 docs/interfaces/lib_types.KeyMetadata.html create mode 100644 docs/interfaces/lib_types.KeyPair.html create mode 100644 
docs/interfaces/lib_types.VerificationResult.html create mode 100644 docs/modules/lib_crypto_utils.html delete mode 100644 docs/modules/lib_errors.html create mode 100644 docs/modules/lib_proof_utils.html create mode 100644 docs/modules/lib_types.html create mode 100644 docs/types/lib_crypto_utils.Alg.html create mode 100644 docs/types/lib_crypto_utils.Crv.html create mode 100644 docs/types/lib_crypto_utils.Hsh.html create mode 100644 docs/types/lib_crypto_utils.Kty.html create mode 100644 docs/variables/lib_proof_utils.rdf_type.html create mode 100644 docs/variables/lib_proof_utils.sec_assertionMethod.html create mode 100644 docs/variables/lib_proof_utils.sec_authenticationMethod.html create mode 100644 docs/variables/lib_proof_utils.sec_created.html create mode 100644 docs/variables/lib_proof_utils.sec_di_proof.html create mode 100644 docs/variables/lib_proof_utils.sec_expires.html create mode 100644 docs/variables/lib_proof_utils.sec_proof.html create mode 100644 docs/variables/lib_proof_utils.sec_proofPurpose.html create mode 100644 docs/variables/lib_proof_utils.sec_proofValue.html create mode 100644 docs/variables/lib_proof_utils.sec_publicKeyJwk.html create mode 100644 docs/variables/lib_proof_utils.sec_revoked.html create mode 100644 docs/variables/lib_proof_utils.sec_verificationMethod.html create mode 100644 docs/variables/lib_proof_utils.xsd_datetime.html delete mode 100644 lib/common.ts rename lib/{proofs.ts => proof_utils.ts} (50%) rename lib/{errors.ts => types.ts} (60%) create mode 100644 testing/keys_alt.json diff --git a/README.md b/README.md index a090809..4df6c49 100644 --- a/README.md +++ b/README.md @@ -10,12 +10,22 @@ The steps for signature follow the "usual" approach for signing data, namely: 1. The input RDF Dataset is canonicalized, using the [RDF Dataset Canonicalization](https://www.w3.org/TR/rdf-canon/), as defined by the W3C. 2. The resulting canonical N-Quads are sorted, and hashed to yield a canonical hash of the Dataset (the W3C specification relies on SHA-256 for hashing by default, which is used here). -3. The hash is signed using a secret key for ECDSA. The signature value is stored as a base64url value following the [Multibase](https://datatracker.ietf.org/doc/draft-multiformats-multibase) format. +3. The hash is signed using a secret key. The signature value is stored as a base64url value following the [Multibase](https://datatracker.ietf.org/doc/draft-multiformats-multibase) format. 4. A separate "proof graph" is generated, that includes the signature value, some basic metadata, and the public key of for the signature, stored in [JWK format](https://www.rfc-editor.org/rfc/rfc7517). The package has separate API entries to generate, and validate, such proof graphs. It is also possible, following the DI spec, to provide "embedded" proofs, i.e., a new dataset, containing the original data, as well as the proof graph(s), each as a separate graph within the dataset. If a separate "anchor" resource is provided, then this new dataset will also contain additional RDF triples connecting the anchor to the proof graphs. -- [Separate document for the API](https://iherman.github.io/rdfjs-di/classes/index.DI_ECDSA.html) +The crypto layer for the package relies on the Web Crypto API specification, and its implementation in `node.js` or `deno`. 
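As an illustration of the workflow described above, here is a minimal usage sketch. It assumes the package is imported under the name `rdfjs-di` (adjust to the local import path when using it from source) and that an RDF Dataset is already available from a separate parser; the function and type names are the ones exported by `index.ts`.

```typescript
import * as rdf from '@rdfjs/types';
import {
    generateKey, embedProofGraph, verifyEmbeddedProofGraph,
    Cryptosuites, KeyData, VerificationResult
} from 'rdfjs-di'; // package name assumed

async function signAndVerify(dataset: rdf.DatasetCore, anchor?: rdf.Quad_Subject): Promise<VerificationResult> {
    // Generate a fresh ECDSA key pair (stored in JWK format)
    const keyData: KeyData = await generateKey(Cryptosuites.ecdsa);
    // New dataset: a copy of the input plus the proof graph(s), connected to the anchor if one is given
    const withProof = await embedProofGraph(dataset, keyData, anchor);
    // Verify the embedded proof(s); errors and warnings are collected in the returned structure
    return verifyEmbeddedProofGraph(withProof, anchor);
}
```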
Accordingly, the following crypto algorithms are available for this implementation + +- [ECDSA](https://w3c.github.io/webcrypto/#ecdsa) +- [RSA-PSS](https://w3c.github.io/webcrypto/#rsa-pss) +- [RSASSA-PKCS1-v1_5](https://w3c.github.io/webcrypto/#rsassa-pkcs1) + +Although not strictly necessary for this package, a separate method is available as part of the API to generate cryptography keys for one of these three algorithms. Note that only ECDSA is part of the [VC Working Groups' specification](https://www.w3.org/TR/vc-di-ecdsa/), identified by the cryptosuite name `ecdsa-2022`; the other two are non-standard, and are identified with the temporary cryptosuite name of `rdfjs-di-rsa-pss` and `rdfjs-di-rsa-ssa`, respectively. + +For more details, see: + +- [Separate document for the API](https://iherman.github.io/rdfjs-di/modules/index.html) - [A small RDF graph](https://github.com/iherman/rdfjs-di/blob/main/examples/small.ttl) and its ["verifiable" version with embedded proof graphs](https://github.com/iherman/rdfjs-di/blob/main/examples/small_with_proofs.ttl) (Note that the API works on an RDF Data model level, and does not include a Turtle/TriG parser or serializer; that should be done separately.) diff --git a/dist/index.d.ts b/dist/index.d.ts index eaf7edd..4329dbf 100644 --- a/dist/index.d.ts +++ b/dist/index.d.ts @@ -1,149 +1,83 @@ +/** + * Externally visible API level for the package. + * + * + * @packageDocumentation + */ import * as rdf from '@rdfjs/types'; -import * as n3 from 'n3'; -import { ProblemDetail } from './lib/errors'; -export { ProblemDetail } from './lib/errors'; -/** Values used internally for the crypto functions; they are defined by the WebCrypto spec. */ -export declare enum Confidentiality { - public = "public", - secret = "secret" -} -export interface VerificationResult { - verified: boolean; - verifiedDocument: rdf.DatasetCore; - warnings: ProblemDetail[]; - errors: ProblemDetail[]; -} +import { KeyData, VerificationResult } from './lib/types'; +export { KeyData, VerificationResult, KeyMetadata, Cryptosuites } from './lib/types'; +export { generateKey, KeyDetails } from './lib/crypto_utils'; /** - * Crypto key pair. The keys are stored in JWK format. - * At the moment, this seems the dominant format for keys in WebCrypto. + * Generate a (separate) proof graph (or graphs), per the DI spec. The signature is stored in + * multibase format, using base64url encoding. Keys are accepted, and stored in JWK format. * - * The values for controller, expires, and revoked, are all optional (see spec for details) + * @param dataset + * @param keyData + * @throws - an error if there was a key issue while signing. + * @returns */ -export interface KeyPair { - public: JsonWebKey; - private: JsonWebKey; - controller?: string; - expires?: string; - revoked?: string; -} -/***************************************************************************************** - * The real meat... - *****************************************************************************************/ +export declare function generateProofGraph(dataset: rdf.DatasetCore, keyData: Iterable): Promise; +export declare function generateProofGraph(dataset: rdf.DatasetCore, keyData: KeyData): Promise; /** - * Subclasses are supposed to set the right algorithm, cryptosuite, etc, names. + * Verify the separate proof graph. + * + * The validity result is the conjunction of the validation result for each proof graphs separately. * + * The following checks are made: + * + * 1. 
There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) + * + * If any of those errors are found, the validation result is `false`. The error reports themselves, with some more details, are part of the verification result structure. + * + * @param dataset + * @param proofGraph + * @returns */ -declare abstract class DataIntegrity { - protected _algorithm: string; - protected _cryptosuite: string; - protected _hash: string; - protected _curve: string; - protected _result: VerificationResult; - constructor(); - protected initResults(): void; - /**************************************************************************************************/ - /**************************************************************************************************/ - /** - * Import a JWK encoded key into a key usable by crypto.subtle. - * - * @param key - the key itself - * @param type - whether this is a private or public key (usable to sign or verify, respectively) - * - * @returns - */ - protected importKey(key: JsonWebKey, type: Confidentiality): Promise; - /** - * Generate a (separate) proof graph, per the DI spec. The signature is stored in - * multibase format, using base64url encoding. - * - * @param hashValue - this is the value of the Dataset's canonical hash - * @param keyPair - * @returns - */ - protected generateAProofGraph(hashValue: string, keyPair: KeyPair): Promise; - /** - * Check one proof graph, ie, whether the included signature corresponds to the hash value. - * - * The following checks are also made and, possibly, exception are raised with errors according to - * the DI standard: - * - * 1. There should be exactly one proof value - * 2. There should be exactly one verification method, which should be a separate resource containing the key - * 3. The key's possible expiration and revocation dates are checked and compared to the current time which should be - * "before" - * 4. The proof's creation date must be before the current time - * 5. The proof purpose(s) must be set, and the values are either authentication or verification - * - * @param hash - * @param proof - * @returns - */ - protected verifyAProofGraph(hash: string, proof: n3.Store, proofId?: rdf.Quad_Graph): Promise; - /** - * Generate a (separate) proof graph (or graphs), per the DI spec. The signature is stored in - * multibase format, using base64url encoding. - * - * This is just a wrapper around {@link generateAProofGraph} to take care of multiple key pairs. - * - * @param dataset - * @param keyPair - * @throws - an error if there was a key issue while signing. 
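As a sketch of the two entry points documented here (`generateProofGraph` and `verifyProofGraph`), assuming `dataset` is an RDF Dataset, `keyData` a single `KeyData` instance, and the calls happen inside an async context:

```typescript
// A single KeyData yields a single proof graph; an Iterable of keys would yield an array of graphs
const proofGraph: rdf.DatasetCore = await generateProofGraph(dataset, keyData);

// The separate proof graph(s) can later be checked against the (unchanged) dataset
const result: VerificationResult = await verifyProofGraph(dataset, proofGraph);
if (!result.verified) {
    console.log(JSON.stringify(result.errors, null, 4));
}
```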
- * @returns - */ - generateProofGraph(dataset: rdf.DatasetCore, keyPair: Iterable): Promise; - generateProofGraph(dataset: rdf.DatasetCore, keyPair: KeyPair): Promise; - /** - * Verify the separate proof graph. - * - * For now, this methods just does the minimum as a proof of concept. A more elaborate version will have - * to verify all details of the proof graph. - * - * @param dataset - * @param proofGraph - * @returns - */ - verifyProofGraph(dataset: rdf.DatasetCore, proofGraph: rdf.DatasetCore): Promise; - verifyProofGraph(dataset: rdf.DatasetCore, proofGraph: rdf.DatasetCore[]): Promise; - /** - * Create a new dataset with the copy of the original and the proof graph as a separate graph within the - * dataset. - * - * The separate quad with the `proof` property is added; if the anchor is properly defined, then that - * will be the subject, otherwise a new blank node. (The latter may be meaningless, but makes it easier - * to find the proof graph for verification.) - * - * If the `keyPair` argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise, - * (e.g., if it is a Set), it is a Proof Set. - * - * Just wrapper around {@link generateProofGraph}. - * @param dataset - * @param keyPair - * @param anchor - * @returns - */ - embedProofGraph(dataset: rdf.DatasetCore, keyPair: KeyPair | Iterable, anchor?: rdf.Quad_Subject): Promise; - /** - * Verify the dataset with embedded proof graphs. The individual proof graphs are identified by the presence - * of a type relationship to `DataIntegrityProof`; the result is the conjunction of the validation result for - * each proof graphs separately. - * - * The following checks are also made and, possibly, exception are raised with errors according to - * the DI standard: - * - * 1. There should be exactly one proof value - * 2. There should be exactly one verification method, which should be a separate resource containing the key - * 3. The key's possible expiration and revocation dates are checked and compared to the current time which should be "before" - * 4. The proof's creation date must be before the current time - * 5. The proof purpose(s) must be set, and the values are either authentication or verification - - * @param dataset - * @returns - */ - verifyEmbeddedProofGraph(dataset: rdf.DatasetCore): Promise; -} +export declare function verifyProofGraph(dataset: rdf.DatasetCore, proofGraph: rdf.DatasetCore | rdf.DatasetCore[]): Promise; /** - * Real instantiation of a DI cryptosuite: ecdsa-2022. + * Create a new dataset with the copy of the original and the proof graph(s) as a separate graph(s) within the + * dataset (a.k.a. "Embedded Proof" in the DI spec terminology). + * + * If the anchor is defined, then that will be the subject for quads with the `proof` property is added (one for each proof graph). + * + * If the `keyPair` argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise, + * (e.g., if it is a Set), it is a Proof Set. + * + * @param dataset + * @param keyData + * @param anchor + * @returns */ -export declare class DI_ECDSA extends DataIntegrity { - constructor(); -} +export declare function embedProofGraph(dataset: rdf.DatasetCore, keyData: KeyData | Iterable, anchor?: rdf.Quad_Subject): Promise; +/** + * Verify the dataset with embedded proof graph(s). + * + * If the anchor is present, the proof graphs are identified by the object terms of the corresponding [`proof`](https://www.w3.org/TR/vc-data-integrity/#proofs) quads. 
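To illustrate the Proof Chain vs. Proof Set distinction described for `embedProofGraph` above (a sketch; `key1` and `key2` are assumed to be `KeyData` instances, `anchor` an optional subject term):

```typescript
// An Array of keys is treated as a Proof Chain: each proof graph also refers to the
// previous one via a `previousProof` statement
const chained = await embedProofGraph(dataset, [key1, key2], anchor);

// Any other Iterable (e.g., a Set) is treated as a Proof Set: the proofs are independent
const proofSet = await embedProofGraph(dataset, new Set([key1, key2]), anchor);
```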
+ * Otherwise, the type relationship to [`DataIntegrityProof`](https://www.w3.org/TR/vc-data-integrity/#dataintegrityproof) are considered. Note that if no anchor is provided, this second choice + * may lead to erroneous results because some of the embedded proof graphs are not meant to be a proof for the full dataset. (This may + * be the case in a ["Verifiable Presentation" style datasets](https://www.w3.org/TR/vc-data-model-2.0/#presentations-0).) + * + * The validity result is the conjunction of the validation result for each proof graphs separately. + * + * The following checks are also made. + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) + * + * If any of those errors occur, the overall validity result is `false`. The error reports themselves, with some more details, are part of the verification result structure. + * + * @param dataset + * @param anchor + * @returns +*/ +export declare function verifyEmbeddedProofGraph(dataset: rdf.DatasetCore, anchor?: rdf.Quad_Subject): Promise; diff --git a/dist/index.js b/dist/index.js index e27f3e6..e888fef 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,469 +1,214 @@ "use strict"; +/** + * Externally visible API level for the package. + * + * + * @packageDocumentation + */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.DI_ECDSA = exports.Confidentiality = exports.ProblemDetail = void 0; +exports.verifyEmbeddedProofGraph = exports.embedProofGraph = exports.verifyProofGraph = exports.generateProofGraph = exports.generateKey = exports.Cryptosuites = void 0; const n3 = require("n3"); -const uuid_1 = require("uuid"); -const errors = require("./lib/errors"); -var errors_1 = require("./lib/errors"); -Object.defineProperty(exports, "ProblemDetail", { enumerable: true, get: function () { return errors_1.ProblemDetail; } }); +const types = require("./lib/types"); const utils_1 = require("./lib/utils"); +const proof_utils_1 = require("./lib/proof_utils"); +/* This file is also the "top level", so a number of exports are put here to be more friendly to users */ +var types_1 = require("./lib/types"); +Object.defineProperty(exports, "Cryptosuites", { enumerable: true, get: function () { return types_1.Cryptosuites; } }); +var crypto_utils_1 = require("./lib/crypto_utils"); +Object.defineProperty(exports, "generateKey", { enumerable: true, get: function () { return crypto_utils_1.generateKey; } }); // n3.DataFactory is a namespace with some functions... -const { namedNode, literal, quad } = n3.DataFactory; -/** Values used internally for the crypto functions; they are defined by the WebCrypto spec. 
*/ -var Confidentiality; -(function (Confidentiality) { - Confidentiality["public"] = "public"; - Confidentiality["secret"] = "secret"; -})(Confidentiality || (exports.Confidentiality = Confidentiality = {})); +const { quad } = n3.DataFactory; +async function generateProofGraph(dataset, keyData) { + // Start fresh with results + const report = { errors: [], warnings: [] }; + // This is to be signed + const toBeSigned = await (0, utils_1.calculateDatasetHash)(dataset); + // prepare for the overload of arguments + const keyPairs = (0, utils_1.isKeyData)(keyData) ? [keyData] : keyData; + // execute the proof graph generation concurrently + const promises = Array.from(keyPairs).map((keypair) => (0, proof_utils_1.generateAProofGraph)(report, toBeSigned, keypair)); + const retval = await Promise.all(promises); + // return by taking care of overloading. + if (report.errors.length !== 0) { + // There were possible errors while generating the signatures + const message = JSON.stringify(report.errors, null, 4); + throw new types.Proof_Generation_Error(`${message}`); + } + else { + return (0, utils_1.isKeyData)(keyData) ? retval[0] : retval; + } +} +exports.generateProofGraph = generateProofGraph; /** - * Type guard to check if an object implements the KeyPair interface. + * Verify the separate proof graph. + * + * The validity result is the conjunction of the validation result for each proof graphs separately. + * + * The following checks are made: + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) * - * @param obj + * If any of those errors are found, the validation result is `false`. The error reports themselves, with some more details, are part of the verification result structure. + * + * @param dataset + * @param proofGraph * @returns */ -function isKeyPair(obj) { - return obj.public !== undefined && obj.private !== undefined; +async function verifyProofGraph(dataset, proofGraph) { + // start fresh with the results: + const report = { errors: [], warnings: [] }; + // this is the value that must be checked... + const hash = await (0, utils_1.calculateDatasetHash)(dataset); + // just to make the handling uniform... + const proofs = (0, utils_1.isDatasetCore)(proofGraph) ? [proofGraph] : proofGraph; + // the "convertToStore" intermediate step is necessary; the proof graph checker needs a n3.Store + const promises = proofs.map(utils_1.convertToStore).map((pr_graph) => (0, proof_utils_1.verifyAProofGraph)(report, hash, pr_graph)); + const results = await Promise.all(promises); + const verified = (report.errors.length > 0) ? 
false : !results.includes(false); + return { + verified, + verifiedDocument: verified ? dataset : null, + errors: report.errors, + warnings: report.warnings + }; } -/*************************************************************************************** - * Namespaces and specific terms that are used several times - **************************************************************************************/ -/* Various namespaces, necessary when constructing a proof graph */ -const sec_prefix = (0, utils_1.createPrefix)("https://w3id.org/security#"); -const rdf_prefix = (0, utils_1.createPrefix)("http://www.w3.org/1999/02/22-rdf-syntax-ns#"); -const xsd_prefix = (0, utils_1.createPrefix)("http://www.w3.org/2001/XMLSchema#"); -const rdf_type = rdf_prefix('type'); -const sec_proof = sec_prefix('proof'); -const sec_proofGraph = sec_prefix('ProofGraph'); -const sec_di_proof = sec_prefix('DataIntegrityProof'); -const sec_proofValue = sec_prefix('proofValue'); -const sec_publicKeyJwk = sec_prefix('publicKeyJwk'); -const sec_proofPurpose = sec_prefix('proofPurpose'); -const sec_authenticationMethod = sec_prefix('authenticationMethod'); -const sec_assertionMethod = sec_prefix('assertionMethod'); -const sec_verificationMethod = sec_prefix('verificationMethod'); -const sec_expires = sec_prefix('expires'); -const sec_revoked = sec_prefix('revoked'); -const sec_created = sec_prefix('created'); -const xsd_datetime = xsd_prefix('dateTime'); -/***************************************************************************************** - * The real meat... - *****************************************************************************************/ +exports.verifyProofGraph = verifyProofGraph; /** - * Subclasses are supposed to set the right algorithm, cryptosuite, etc, names. + * Create a new dataset with the copy of the original and the proof graph(s) as a separate graph(s) within the + * dataset (a.k.a. "Embedded Proof" in the DI spec terminology). + * + * If the anchor is defined, then that will be the subject for quads with the `proof` property is added (one for each proof graph). + * + * If the `keyPair` argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise, + * (e.g., if it is a Set), it is a Proof Set. * + * @param dataset + * @param keyData + * @param anchor + * @returns */ -class DataIntegrity { - _algorithm; - _cryptosuite; - _hash; - _curve; - _result; - constructor() { - this._hash = "SHA-256"; - this.initResults(); - } - initResults() { - this._result = { - verified: false, - verifiedDocument: null, - warnings: [], - errors: [], - }; - } - // get algorithm(): string { return this._algorithm } - // get cryptosuite(): string { return this._cryptosuite; } - // get hash(): string { return this._hash; } - // get curve(): string { return this._curve; } - /**************************************************************************************************/ - /* Internal functions. All of them are protected, ie, usable by the concrete subclasses */ - /**************************************************************************************************/ - /** - * Import a JWK encoded key into a key usable by crypto.subtle. - * - * @param key - the key itself - * @param type - whether this is a private or public key (usable to sign or verify, respectively) - * - * @returns - */ - async importKey(key, type) { - try { - const retval = await crypto.subtle.importKey("jwk", key, { - name: this._algorithm, - namedCurve: this._curve, - }, true, type === Confidentiality.public ? 
["verify"] : ["sign"]); - if (retval === null) { - this._result.errors.push(new errors.Invalid_Verification_Method(`Invalid key: ${JSON.stringify(key, null, 4)}`)); +async function embedProofGraph(dataset, keyData, anchor) { + const retval = (0, utils_1.convertToStore)(dataset); + const keyPairs = (0, utils_1.isKeyData)(keyData) ? [keyData] : Array.from(keyData); + const proofGraphs = await generateProofGraph(dataset, keyPairs); + const isKeyChain = keyPairs.length > 1 && Array.isArray(keyData); + const chain = []; + for (let i = 0; i < proofGraphs.length; i++) { + const proofTriples = proofGraphs[i]; + const proofGraphID = retval.createBlankNode(); + for (const q of proofTriples) { + retval.add(quad(q.subject, q.predicate, q.object, proofGraphID)); + if (isKeyChain && q.predicate.value === proof_utils_1.rdf_type.value && q.object.value === proof_utils_1.sec_di_proof.value) { + // Storing the values to create the proof chains in a subsequent step + // The subject is the ID of the proof + chain.push({ + proofId: q.subject, + graph: proofGraphID, + }); } - return retval; } - catch (e) { - this._result.errors.push(new errors.Invalid_Verification_Method(`Invalid key: ${JSON.stringify(key)} (${e.message})`)); - return null; + ; + if (anchor) { + const q = quad(anchor, proof_utils_1.sec_proof, proofGraphID); + retval.add(q); } } - ; - /** - * Generate a (separate) proof graph, per the DI spec. The signature is stored in - * multibase format, using base64url encoding. - * - * @param hashValue - this is the value of the Dataset's canonical hash - * @param keyPair - * @returns - */ - async generateAProofGraph(hashValue, keyPair) { - // Calculate the hash of the dataset, and sign the hash with the secret key - // This is the "core"... - const signHashValue = async () => { - const key = await this.importKey(keyPair.private, Confidentiality.secret); - if (key === null) { - return ""; - } - else { - const raw_signature = await crypto.subtle.sign({ - name: this._algorithm, - hash: this._hash - }, key, (0, utils_1.textToArrayBuffer)(hashValue)); - return `u${(0, utils_1.arrayBufferToBase64Url)(raw_signature)}`; - } - }; - // Create a proof graph. Just a boring set of quad generations... 
- const createProofGraph = (proofValue) => { - const retval = new n3.Store(); - // Unique URL-s, for the time being as uuid-s - const proofGraphId = `urn:uuid:${(0, uuid_1.v4)()}`; - const proofGraph = namedNode(proofGraphId); - const verificationMethodId = `urn:uuid:${(0, uuid_1.v4)()}`; - const keyResource = namedNode(verificationMethodId); - retval.addQuads([ - quad(proofGraph, rdf_type, sec_di_proof), - quad(proofGraph, sec_prefix('cryptosuite'), literal(this._cryptosuite)), - quad(proofGraph, sec_created, literal((new Date()).toISOString(), xsd_datetime)), - quad(proofGraph, sec_verificationMethod, keyResource), - quad(proofGraph, sec_proofValue, literal(proofValue)), - quad(proofGraph, sec_proofPurpose, sec_authenticationMethod), - quad(proofGraph, sec_proofPurpose, sec_assertionMethod), - quad(keyResource, rdf_type, sec_prefix('JsonWebKey')), - quad(keyResource, sec_publicKeyJwk, literal(JSON.stringify(keyPair.public), rdf_prefix('JSON'))), - ]); - if (keyPair.controller) - retval.add(quad(keyResource, sec_prefix('controller'), namedNode(keyPair.controller))); - if (keyPair.expires) - retval.add(quad(keyResource, sec_expires, literal(keyPair.expires, xsd_datetime))); - if (keyPair.revoked) - retval.add(quad(keyResource, sec_revoked, literal(keyPair.revoked, xsd_datetime))); - return retval; - }; - return createProofGraph(await signHashValue()); - } - /** - * Check one proof graph, ie, whether the included signature corresponds to the hash value. - * - * The following checks are also made and, possibly, exception are raised with errors according to - * the DI standard: - * - * 1. There should be exactly one proof value - * 2. There should be exactly one verification method, which should be a separate resource containing the key - * 3. The key's possible expiration and revocation dates are checked and compared to the current time which should be - * "before" - * 4. The proof's creation date must be before the current time - * 5. The proof purpose(s) must be set, and the values are either authentication or verification - * - * @param hash - * @param proof - * @returns - */ - async verifyAProofGraph(hash, proof, proofId) { - let localErrors = []; - let localWarnings = []; - // Verify the signature by check signature of the hash with the key - // This is the "core"... 
- const checkHashValue = async (proof_value, key_jwk) => { - const key = await this.importKey(key_jwk, Confidentiality.public); - const signature_array = (0, utils_1.base64UrlToArrayBuffer)(proof_value.slice(1)); - const data = (0, utils_1.textToArrayBuffer)(hash); - if (key === null) { - return false; - } - else { - const retval = await crypto.subtle.verify({ - name: this._algorithm, - hash: this._hash - }, key, signature_array, data); - return retval; - } - }; - const getProofValue = (store) => { - // Retrieve the signature value per spec: - const proof_values = store.getQuads(null, sec_proofValue, null, null); - if (proof_values.length === 0) { - localErrors.push(new errors.Malformed_Proof_Error("No proof value")); - return null; - } - else if (proof_values.length > 1) { - localErrors.push(new errors.Malformed_Proof_Error("Several proof values")); - } - return proof_values[0].object.value; - }; - const getPublicKey = (store) => { - // first see if the verificationMethod has been set properly - const verificationMethod = store.getQuads(null, sec_verificationMethod, null, null); - if (verificationMethod.length === 0) { - localErrors.push(new errors.Malformed_Proof_Error("No verification method")); - return null; - } - else if (verificationMethod.length > 1) { - localErrors.push(new errors.Malformed_Proof_Error("Several verification methods")); - } - const publicKey = verificationMethod[0].object; - const keys = store.getQuads(publicKey, sec_publicKeyJwk, null, null); - if (keys.length === 0) { - localErrors.push(new errors.Invalid_Verification_Method(`No key values`)); - return null; - } - else if (keys.length > 1) { - localErrors.push(new errors.Invalid_Verification_Method("More than one keys provided")); - } - // Check the creation/expiration/revocation dates, if any... - const now = new Date(); - const creationDates = store.getQuads(null, sec_created, null, null); - for (const exp of creationDates) { - if ((new Date(exp.object.value)) > now) { - localWarnings.push(new errors.Invalid_Verification_Method(`Proof was created in the future... ${exp.object.value}`)); - } - } - const expirationDates = store.getQuads(publicKey, sec_expires, null, null); - for (const exp of expirationDates) { - if ((new Date(exp.object.value)) < now) { - localErrors.push(new errors.Invalid_Verification_Method(`<${publicKey.value}> key expired on ${exp.object.value}`)); - return null; - } - } - const revocationDates = store.getQuads(publicKey, sec_revoked, null, null); - for (const exp of revocationDates) { - if ((new Date(exp.object.value)) < now) { - localErrors.push(new errors.Invalid_Verification_Method(`<${publicKey.value}> key was revoked on ${exp.object.value}`)); - return null; - } - } - try { - return JSON.parse(keys[0].object.value); - } - catch (e) { - // This happens if there is a JSON parse error with the key... 
- localWarnings.push(new errors.Malformed_Proof_Error(`Parsing error for JWK: ${e.message}`)); - return null; - } - }; - // Check the "proofPurpose" property value - const checkProofPurposes = (store) => { - const purposes = store.getQuads(null, sec_proofPurpose, null, null); - if (purposes.length === 0) { - throw new errors.Invalid_Verification_Method("No proof purpose set"); - } - else { - const wrongPurposes = []; - for (const q of purposes) { - if (!(q.object.equals(sec_authenticationMethod) || q.object.equals(sec_assertionMethod))) { - wrongPurposes.push(`<${q.object.value}>`); - } - } - if (wrongPurposes.length > 0) { - localErrors.push(new errors.Mismatched_Proof_Purpose(`Invalid proof purpose value(s): ${wrongPurposes.join(", ")}`)); - } - } - }; - // Retrieve necessary values with checks - checkProofPurposes(proof); - const publicKey = getPublicKey(proof); - const proofValue = getProofValue(proof); - // The final set of error/warning should be modified with the proof graph's ID, if applicable - if (proofId) { - localErrors.forEach((error) => { - error.detail = `${error.detail} (graph ID: <${proofId.value}>)`; - }); - localWarnings.forEach((warning) => { - warning.detail = `${warning.detail} (<${proofId.value}>)`; - }); - } - this._result.errors = [...this._result.errors, ...localErrors]; - this._result.warnings = [...this._result.warnings, ...localWarnings]; - // Here we go with checking... - if (publicKey !== null && proofValue !== null) { - const check_results = await checkHashValue(proofValue, publicKey); - // the return value should nevertheless be false if there have been errors - return check_results ? localErrors.length === 0 : true; - } - else { - return false; - } - } - async generateProofGraph(dataset, keyPair) { - // Start fresh with results - this.initResults(); - // This is to be signed - const toBeSigned = await (0, utils_1.calculateDatasetHash)(dataset); - // prepare for the overload of arguments - const keyPairs = isKeyPair(keyPair) ? [keyPair] : keyPair; - // execute the proof graph generation concurrently - const promises = Array.from(keyPairs).map((keypair) => this.generateAProofGraph(toBeSigned, keypair)); - const retval = await Promise.all(promises); - // return by taking care of overloading. - if (this._result.errors.length !== 0) { - // There were possible errors while generating the signatures - const message = JSON.stringify(this._result.errors, null, 2); - throw new errors.Proof_Generation_Error(message); - } - else { - return isKeyPair(keyPair) ? retval[0] : retval; + // Adding the chain statements, if required + if (isKeyChain) { + for (let i = 1; i < chain.length; i++) { + const q = quad(chain[i].proofId, (0, proof_utils_1.sec_prefix)("previousProof"), chain[i - 1].proofId, chain[i].graph); + retval.add(q); } } - async verifyProofGraph(dataset, proofGraph) { - // start fresh with the results: - this.initResults(); - // this is the value that must be checked... - const hash = await (0, utils_1.calculateDatasetHash)(dataset); - // just to make the handling uniform... - const proofs = (0, utils_1.isDatasetCore)(proofGraph) ? [proofGraph] : proofGraph; - // the "convertToStore" intermediate step is necessary; the proof graph checker needs a n3.Store - const promises = proofs.map(utils_1.convertToStore).map((pr_graph) => this.verifyAProofGraph(hash, pr_graph)); - const results = await Promise.all(promises); - return (0, utils_1.isDatasetCore)(proofGraph) ? 
results[0] : results; - } - /** - * Create a new dataset with the copy of the original and the proof graph as a separate graph within the - * dataset. - * - * The separate quad with the `proof` property is added; if the anchor is properly defined, then that - * will be the subject, otherwise a new blank node. (The latter may be meaningless, but makes it easier - * to find the proof graph for verification.) - * - * If the `keyPair` argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise, - * (e.g., if it is a Set), it is a Proof Set. - * - * Just wrapper around {@link generateProofGraph}. - * @param dataset - * @param keyPair - * @param anchor - * @returns - */ - async embedProofGraph(dataset, keyPair, anchor) { - const retval = (0, utils_1.convertToStore)(dataset); - const keyPairs = isKeyPair(keyPair) ? [keyPair] : Array.from(keyPair); - const proofGraphs = await this.generateProofGraph(dataset, keyPairs); - const isKeyChain = keyPairs.length > 1 && Array.isArray(keyPair); - const chain = []; - for (let i = 0; i < proofGraphs.length; i++) { - const proofTriples = proofGraphs[i]; - const proofGraphID = retval.createBlankNode(); - for (const q of proofTriples) { - retval.add(quad(q.subject, q.predicate, q.object, proofGraphID)); - if (isKeyChain && q.predicate.value === rdf_type.value && q.object.value === sec_di_proof.value) { - // Storing the values to create the proof chains in a subsequent step - // The subject is the ID of the proof - chain.push({ - proofId: q.subject, - graph: proofGraphID, - }); - } - } - ; - if (anchor) { - const q = quad(anchor, sec_proof, proofGraphID); - retval.add(q); - } - } - // Adding the chain statements, if required - if (isKeyChain) { - for (let i = 1; i < chain.length; i++) { - const q = quad(chain[i].proofId, sec_prefix("previousProof"), chain[i - 1].proofId, chain[i].graph); - retval.add(q); - } - } - return retval; - } - /** - * Verify the dataset with embedded proof graphs. The individual proof graphs are identified by the presence - * of a type relationship to `DataIntegrityProof`; the result is the conjunction of the validation result for - * each proof graphs separately. - * - * The following checks are also made and, possibly, exception are raised with errors according to - * the DI standard: - * - * 1. There should be exactly one proof value - * 2. There should be exactly one verification method, which should be a separate resource containing the key - * 3. The key's possible expiration and revocation dates are checked and compared to the current time which should be "before" - * 4. The proof's creation date must be before the current time - * 5. The proof purpose(s) must be set, and the values are either authentication or verification - - * @param dataset - * @returns - */ - async verifyEmbeddedProofGraph(dataset) { - // start fresh with the results: - this.initResults(); - const dataStore = new n3.Store(); - const proofGraphs = new utils_1.DatasetMap(); - // Separate the core data from the datasets; - // First, identify the possible dataset graph IDs - for (const q of dataset) { - // A dataset can be identified with a proof property. - if (q.predicate.equals(sec_proof)) { - // the object refers to a proof graph (unless it is a literal, which is a bug!) + return retval; +} +exports.embedProofGraph = embedProofGraph; +/** + * Verify the dataset with embedded proof graph(s). 
+ * + * If the anchor is present, the proof graphs are identified by the object terms of the corresponding [`proof`](https://www.w3.org/TR/vc-data-integrity/#proofs) quads. + * Otherwise, the type relationship to [`DataIntegrityProof`](https://www.w3.org/TR/vc-data-integrity/#dataintegrityproof) are considered. Note that if no anchor is provided, this second choice + * may lead to erroneous results because some of the embedded proof graphs are not meant to be a proof for the full dataset. (This may + * be the case in a ["Verifiable Presentation" style datasets](https://www.w3.org/TR/vc-data-model-2.0/#presentations-0).) + * + * The validity result is the conjunction of the validation result for each proof graphs separately. + * + * The following checks are also made. + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) + * + * If any of those errors occur, the overall validity result is `false`. The error reports themselves, with some more details, are part of the verification result structure. + * + * @param dataset + * @param anchor + * @returns +*/ +async function verifyEmbeddedProofGraph(dataset, anchor) { + // start fresh with the results: + const report = { errors: [], warnings: [] }; + const dataStore = new n3.Store(); + const proofGraphs = new utils_1.DatasetMap(); + // First, identify the possible dataset graph IDs + for (const q of dataset) { + // Branching on whether there is an anchor explicitly setting the proof graphs + if (anchor) { + if (q.predicate.equals(proof_utils_1.sec_proof) && q.subject.equals(anchor)) { if (q.object.termType !== "Literal") { proofGraphs.item(q.object); } - // The quad is not copied to the dataStore! } - else if (q.predicate.equals(rdf_type) && q.object.equals(sec_di_proof)) { - // the triple is in a proof graph! + } + else { + // There is no anchor; we are looking for graphs whose type has been set + // This branch is the reason we have to use a DatasetMap for the + // storage of graph IDs; we should not have duplicate entries. + if (q.predicate.equals(proof_utils_1.rdf_type) && q.object.equals(proof_utils_1.sec_di_proof)) { proofGraphs.item(q.graph); } } - // By now, we got the identification of all the proof graphs, we can separate the quads among - // the data graph and the relevant proof graphs - for (const q of dataset) { - if (q.predicate.equals(sec_proof)) { - // this is an extra entry, not part of the triples that were signed - continue; - } - else if (q.graph.termType === "DefaultGraph") { - dataStore.add(q); - } - else if (proofGraphs.has(q.graph)) { - // this quad belongs to a proof graph! 
- // Note that the proof graphs contain only triples, because they are - // separate entities now... - proofGraphs.item(q.graph).add(quad(q.subject, q.predicate, q.object)); - } - else { - // This a bona fide data quad - dataStore.add(q); - } + } + // By now, we got the identification of all the proof graphs, we can separate the quads among + // the data graph and the relevant proof graphs + for (const q of dataset) { + if (q.predicate.equals(proof_utils_1.sec_proof) && proofGraphs.has(q.graph)) { + // this is an extra entry, not part of the triples that were signed + // neither it is part of any proof graphs + continue; + } + else if (q.graph.termType === "DefaultGraph") { + dataStore.add(q); } - const hash = await (0, utils_1.calculateDatasetHash)(dataStore); - const proofs = proofGraphs.data(); - const promises = proofs.map((prGraph) => this.verifyAProofGraph(hash, prGraph.dataset, prGraph.id)); - const results = await Promise.all(promises); - if (this._result.errors.length > 0) { - this._result.verified = false; + else if (proofGraphs.has(q.graph)) { + // this quad belongs to a proof graph! + // Note that the separated proof graphs contain only triples, they become + // stand-alone RDF graphs now + proofGraphs.item(q.graph).add(quad(q.subject, q.predicate, q.object)); } else { - this._result.verified = !results.includes(false); + // This a bona fide data quad, to be stored as such + dataStore.add(q); } - this._result.verifiedDocument = this._result.verified ? dataStore : null; - return this._result; - } - ; -} -/** - * Real instantiation of a DI cryptosuite: ecdsa-2022. - */ -class DI_ECDSA extends DataIntegrity { - constructor() { - super(); - this._algorithm = "ECDSA"; - this._cryptosuite = "ecdsa-2022"; - this._curve = "P-256"; } + const hash = await (0, utils_1.calculateDatasetHash)(dataStore); + const proofs = proofGraphs.data(); + const promises = proofs.map((prGraph) => (0, proof_utils_1.verifyAProofGraph)(report, hash, prGraph.dataset, prGraph.id)); + const results = await Promise.all(promises); + const verified = (report.errors.length > 0) ? false : !results.includes(false); + return { + verified, + verifiedDocument: verified ? dataStore : null, + errors: report.errors, + warnings: report.warnings + }; } -exports.DI_ECDSA = DI_ECDSA; +exports.verifyEmbeddedProofGraph = verifyEmbeddedProofGraph; diff --git a/dist/lib/crypto_utils.d.ts b/dist/lib/crypto_utils.d.ts new file mode 100644 index 0000000..e777153 --- /dev/null +++ b/dist/lib/crypto_utils.d.ts @@ -0,0 +1,74 @@ +/** + * "Internal API" to the WebCrypto facilities. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * Most of them are not exported (via `index.ts`) to + * package users. + * + * Note that, at the moment, the "interchange format" for keys is restricted to JWK. One + * area of improvement may be to allow for other formats (the DI standard refers to Multikey). 
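As a sketch of how the helpers declared below fit together (a signing and verification round trip over a plain string; import paths assumed relative to the package root):

```typescript
import { generateKey, sign, verify } from './lib/crypto_utils';
import { Cryptosuites, Errors } from './lib/types';

async function roundTrip(message: string): Promise<boolean> {
    const report: Errors = { errors: [], warnings: [] };
    const keys = await generateKey(Cryptosuites.ecdsa);
    // The signature comes back in multibase form: a 'u' prefix followed by the base64url value
    const signature = await sign(report, message, keys.private);
    if (signature === null) return false;   // the reasons are collected in report.errors
    return verify(report, message, signature, keys.public);
}
```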
+ * + * @packageDocumentation + */ +import { KeyMetadata, KeyData, Cryptosuites, KeyPair, Errors } from './types'; +/** JWK values for the algorithms that are relevant for this package */ +export type Alg = "RS256" | "RS384" | "RS512" | "PS256" | "PS384" | "PS512"; +/** JWK values for the elliptic curves that are relevant for this package */ +export type Crv = "P-256" | "P-384" | "P-521"; +/** JWK values for the hash methods that are relevant for this package */ +export type Hsh = "SHA-256" | "SHA-384" | "SHA-512"; +/** JWK values for the key types that are relevant for this package */ +export type Kty = "EC" | "RSA"; +/** Information that may be used when generating new keys */ +export interface KeyDetails { + namedCurve?: Crv; + hash?: Hsh; + modulusLength?: number; +} +/*********************************************************************************** + * + * The externally visible API entries + * +***********************************************************************************/ +/** + * Sign a message. + * + * Possible errors are added to the report, no exceptions should be thrown. + * + * @param report + * @param message + * @param secretKey + * @returns - either the signature in Multicode format, or `null` in case of an error. + */ +export declare function sign(report: Errors, message: string, secretKey: JsonWebKey): Promise; +/** + * Verify a signature + * + * Possible errors are added to the report, no exceptions should be thrown. + * + * @param report + * @param message + * @param secretKey + * @returns + */ +export declare function verify(report: Errors, message: string, signature: string, publicKey: JsonWebKey): Promise; +/** + * Mapping from the JWK data to the corresponding DI cryptosuite identifier. + * + * @param report + * @param keyPair + * @returns + */ +export declare function cryptosuiteId(report: Errors, keyPair: KeyPair): Cryptosuites | null; +/** + * Generate key pair to be used with DI in general. This function is not necessary for the core + * functionalities of the package, but may be useful for the package users. It is therefore + * meant to be re-exported via the `index.ts` module. + * + * @param metadata + * @param suite + * @param keyData + * @returns + */ +export declare function generateKey(suite: Cryptosuites, metadata?: KeyMetadata, keyData?: KeyDetails): Promise; diff --git a/dist/lib/crypto_utils.js b/dist/lib/crypto_utils.js new file mode 100644 index 0000000..3c378db --- /dev/null +++ b/dist/lib/crypto_utils.js @@ -0,0 +1,274 @@ +"use strict"; +/** + * "Internal API" to the WebCrypto facilities. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * Most of them are not exported (via `index.ts`) to + * package users. + * + * Note that, at the moment, the "interchange format" for keys is restricted to JWK. One + * area of improvement may be to allow for other formats (the DI standard refers to Multikey). + * + * @packageDocumentation + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.generateKey = exports.cryptosuiteId = exports.verify = exports.sign = void 0; +const types = require("./types"); +const types_1 = require("./types"); +/*********************************************************************************** + * + * JWK vs. 
WebCrypto API mappings + * +***********************************************************************************/ +/** Default values for keys, some of them can be overwritten */ +const SALT_LENGTH = 32; +const DEFAULT_MODUS_LENGTH = 2048; +const DEFAULT_HASH = "SHA-256"; +const DEFAULT_CURVE = "P-256"; +/** + * Mapping between the "alg values in the JWK instance and the necessary + * terms for the WebCrypto API + */ +const RsaAlgs = { + "PS256": { name: 'RSA-PSS', hash: 'SHA-256', saltLength: SALT_LENGTH }, + "PS384": { name: 'RSA-PSS', hash: 'SHA-384', saltLength: SALT_LENGTH }, + "PS512": { name: 'RSA-PSS', hash: 'SHA-512', saltLength: SALT_LENGTH }, + "RS256": { name: 'RSASSA-PKCS1-v1_5', hash: 'SHA-256' }, + "RS384": { name: 'RSASSA-PKCS1-v1_5', hash: 'SHA-384' }, + "RS512": { name: 'RSASSA-PKCS1-v1_5', hash: 'SHA-512' }, +}; +/** + * Mapping of the JWK instance and the corresponding terms for the WebCrypto API + * + * @param report + * @param key + * @returns + */ +function algorithmData(report, key) { + switch (key.kty) { + case "EC": { + return { + name: "ECDSA", + namedCurve: key.crv, + hash: DEFAULT_HASH + }; + } + case "RSA": { + try { + return RsaAlgs[key.alg]; + } + catch (e) { + report.errors.push(new types.Unclassified_Error(`Key's error in 'alg': ${e.message}`)); + return null; + } + } + } +} +/** + * Export a WebCrypto crypto key pair into their JWK equivalent. + * + * @param newPair + * @returns + */ +async function toJWK(newPair) { + const publicKey = await crypto.subtle.exportKey("jwk", newPair.publicKey); + const privateKey = await crypto.subtle.exportKey("jwk", newPair.privateKey); + return { public: publicKey, private: privateKey }; +} +/*********************************************************************************** + * + * Utilities for ArrayBuffer vs. string representations + * +***********************************************************************************/ +/* + * These two came from perplexity, hopefully it is correct... + */ +const base64ToUrl = (base64String) => { + return base64String.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, ''); +}; +const urlToBase64 = (base64Url) => { + return base64Url.replace(/-/g, '+').replace(/_/g, '/'); +}; +/** + * Text to array buffer, needed for crypto operations + * @param text + */ +function textToArrayBuffer(text) { + return (new TextEncoder()).encode(text).buffer; +} +/** + * Convert an array buffer to a base64url value. + * + * (Created with the help of chatgpt...) + * + * @param arrayBuffer + * @returns + */ +function arrayBufferToBase64Url(arrayBuffer) { + const bytes = new Uint8Array(arrayBuffer); + let binary = ""; + for (let i = 0; i < bytes.length; i++) { + binary += String.fromCharCode(bytes[i]); + } + const base64String = btoa(binary); + return base64ToUrl(base64String); +} +/** + * Convert a base64url value to an array buffer + * + * (Created with the help of chatgpt...) + * + * @param url + * @returns + */ +function base64UrlToArrayBuffer(url) { + const base64string = urlToBase64(url); + const binary = atob(base64string); + const byteArray = new Uint8Array(binary.length); + for (let i = 0; i < binary.length; i++) { + byteArray[i] = binary.charCodeAt(i); + } + return byteArray.buffer; +} +/*********************************************************************************** + * + * The externally visible API entries + * +***********************************************************************************/ +/** + * Sign a message. 
+ * + * Possible errors are added to the report, no exceptions should be thrown. + * + * @param report + * @param message + * @param secretKey + * @returns - either the signature in Multicode format, or `null` in case of an error. + */ +async function sign(report, message, secretKey) { + // Prepare the message to signature: + const rawMessage = textToArrayBuffer(message); + // The crypto algorithm to be used with this key: + const algorithm = algorithmData(report, secretKey); + if (algorithm === null) { + return null; + } + else { + try { + // Import the JWK key into crypto key: + const key = await crypto.subtle.importKey("jwk", secretKey, algorithm, true, ["sign"]); + const rawSignature = await crypto.subtle.sign(algorithm, key, rawMessage); + // Turn the the signature into Base64URL, and the into multicode + return `u${arrayBufferToBase64Url(rawSignature)}`; + } + catch (e) { + report.errors.push(new types.Proof_Generation_Error(e.message)); + return null; + } + } +} +exports.sign = sign; +/** + * Verify a signature + * + * Possible errors are added to the report, no exceptions should be thrown. + * + * @param report + * @param message + * @param secretKey + * @returns + */ +async function verify(report, message, signature, publicKey) { + const rawMessage = textToArrayBuffer(message); + if (signature.length === 0 || signature[0] !== 'u') { + report.errors.push(new types.Malformed_Proof_Error(`Signature is of an incorrect format (${signature})`)); + return false; + } + const rawSignature = base64UrlToArrayBuffer(signature.slice(1)); + // get the keys: + const algorithm = algorithmData(report, publicKey); + if (algorithm === null) { + return false; + } + else { + try { + const key = await crypto.subtle.importKey("jwk", publicKey, algorithm, true, ["verify"]); + const retval = await crypto.subtle.verify(algorithm, key, rawSignature, rawMessage); + return retval; + } + catch (e) { + report.errors.push(new types.Proof_Generation_Error(e.message)); + return false; + } + } +} +exports.verify = verify; +/** + * Mapping from the JWK data to the corresponding DI cryptosuite identifier. + * + * @param report + * @param keyPair + * @returns + */ +function cryptosuiteId(report, keyPair) { + // Some elementary check + if (keyPair.private.kty !== keyPair.public.kty || + keyPair.private.crv !== keyPair.public.crv || + keyPair.private.alg !== keyPair.private.alg) { + report.errors.push(new types.Invalid_Verification_Method('Keys are not in pair (in:\n ${JSON.stringify(keyPair,null,4)})')); + return null; + } + const alg = algorithmData(report, keyPair.public); + switch (alg.name) { + case "ECDSA": return types_1.Cryptosuites.ecdsa; + case "RSA-PSS": return types_1.Cryptosuites.rsa_pss; + case "RSASSA-PKCS1-v1_5": return types_1.Cryptosuites.rsa_ssa; + default: { + report.errors.push(new types.Invalid_Verification_Method(`Unknown alg (${alg.name} in:\n ${JSON.stringify(keyPair, null, 4)})`)); + return null; + } + } +} +exports.cryptosuiteId = cryptosuiteId; +/** + * Generate key pair to be used with DI in general. This function is not necessary for the core + * functionalities of the package, but may be useful for the package users. It is therefore + * meant to be re-exported via the `index.ts` module. 
+ * + * @param metadata + * @param suite + * @param keyData + * @returns + */ +async function generateKey(suite, metadata, keyData) { + const suiteToAPI = () => { + switch (suite) { + case types_1.Cryptosuites.ecdsa: return { + name: "ECDSA", + namedCurve: keyData?.namedCurve || DEFAULT_CURVE, + }; + case types_1.Cryptosuites.rsa_pss: return { + name: "RSA-PSS", + modulusLength: keyData?.modulusLength || DEFAULT_MODUS_LENGTH, + publicExponent: new Uint8Array([0x01, 0x00, 0x01]), + hash: keyData?.hash || DEFAULT_HASH, + }; + case types_1.Cryptosuites.rsa_ssa: return { + name: 'RSASSA-PKCS1-v1_5', + modulusLength: keyData?.modulusLength || DEFAULT_MODUS_LENGTH, + publicExponent: new Uint8Array([0x01, 0x00, 0x01]), + hash: keyData?.hash || DEFAULT_HASH, + }; + } + }; + const newPair = await crypto.subtle.generateKey(suiteToAPI(), true, ["sign", "verify"]); + const keyPair = await toJWK(newPair); + const retval = { + public: keyPair.public, + private: keyPair.private, + cryptosuite: `${suite}`, + }; + return { ...retval, ...metadata }; +} +exports.generateKey = generateKey; diff --git a/dist/lib/proof_utils.d.ts b/dist/lib/proof_utils.d.ts new file mode 100644 index 0000000..88933ff --- /dev/null +++ b/dist/lib/proof_utils.d.ts @@ -0,0 +1,61 @@ +/** + * "Internal API" for handling proof graphs. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * They are not exported (via `index.ts`) to + * package users. + * + * @packageDocumentation + */ +import * as rdf from '@rdfjs/types'; +import * as n3 from 'n3'; +import { Errors, KeyData } from './types'; +/*************************************************************************************** + * Namespaces and specific terms that are used several times + **************************************************************************************/ +export declare const sec_prefix: (l: string) => rdf.NamedNode; +export declare const rdf_prefix: (l: string) => rdf.NamedNode; +export declare const xsd_prefix: (l: string) => rdf.NamedNode; +export declare const rdf_type: rdf.NamedNode; +export declare const sec_proof: rdf.NamedNode; +export declare const sec_di_proof: rdf.NamedNode; +export declare const sec_proofValue: rdf.NamedNode; +export declare const sec_publicKeyJwk: rdf.NamedNode; +export declare const sec_proofPurpose: rdf.NamedNode; +export declare const sec_authenticationMethod: rdf.NamedNode; +export declare const sec_assertionMethod: rdf.NamedNode; +export declare const sec_verificationMethod: rdf.NamedNode; +export declare const sec_expires: rdf.NamedNode; +export declare const sec_revoked: rdf.NamedNode; +export declare const sec_created: rdf.NamedNode; +export declare const xsd_datetime: rdf.NamedNode; +/** + * Generate a (separate) proof graph, per the DI spec. The signature is stored in + * [multibase format](https://www.w3.org/TR/vc-data-integrity/#multibase-0), using base64url encoding. + * + * @param hashValue - this is the value of the Dataset's canonical hash + * @param keyData + * @returns + */ +export declare function generateAProofGraph(report: Errors, hashValue: string, keyData: KeyData): Promise; +/** + * Check one proof graph, ie, whether the included signature corresponds to the hash value. + * + * The following checks are also made: + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. 
There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) + * + * Errors are stored in the `report` structure. If any error occurs, the result is false. + * + * @param report + * @param hash + * @param proof + * @returns + */ +export declare function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, proofId?: rdf.Quad_Graph): Promise; diff --git a/dist/lib/proof_utils.js b/dist/lib/proof_utils.js new file mode 100644 index 0000000..a903899 --- /dev/null +++ b/dist/lib/proof_utils.js @@ -0,0 +1,209 @@ +"use strict"; +/** + * "Internal API" for handling proof graphs. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * They are not exported (via `index.ts`) to + * package users. + * + * @packageDocumentation + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyAProofGraph = exports.generateAProofGraph = exports.xsd_datetime = exports.sec_created = exports.sec_revoked = exports.sec_expires = exports.sec_verificationMethod = exports.sec_assertionMethod = exports.sec_authenticationMethod = exports.sec_proofPurpose = exports.sec_publicKeyJwk = exports.sec_proofValue = exports.sec_di_proof = exports.sec_proof = exports.rdf_type = exports.xsd_prefix = exports.rdf_prefix = exports.sec_prefix = void 0; +const n3 = require("n3"); +const uuid_1 = require("uuid"); +const types = require("./types"); +const utils_1 = require("./utils"); +const crypto_utils_1 = require("./crypto_utils"); +// n3.DataFactory is a namespace with some functions... 
+const { namedNode, literal, quad } = n3.DataFactory; +/*************************************************************************************** + * Namespaces and specific terms that are used several times + **************************************************************************************/ +/* Various namespaces, necessary when constructing a proof graph */ +exports.sec_prefix = (0, utils_1.createPrefix)("https://w3id.org/security#"); +exports.rdf_prefix = (0, utils_1.createPrefix)("http://www.w3.org/1999/02/22-rdf-syntax-ns#"); +exports.xsd_prefix = (0, utils_1.createPrefix)("http://www.w3.org/2001/XMLSchema#"); +exports.rdf_type = (0, exports.rdf_prefix)('type'); +exports.sec_proof = (0, exports.sec_prefix)('proof'); +exports.sec_di_proof = (0, exports.sec_prefix)('DataIntegrityProof'); +exports.sec_proofValue = (0, exports.sec_prefix)('proofValue'); +exports.sec_publicKeyJwk = (0, exports.sec_prefix)('publicKeyJwk'); +exports.sec_proofPurpose = (0, exports.sec_prefix)('proofPurpose'); +exports.sec_authenticationMethod = (0, exports.sec_prefix)('authenticationMethod'); +exports.sec_assertionMethod = (0, exports.sec_prefix)('assertionMethod'); +exports.sec_verificationMethod = (0, exports.sec_prefix)('verificationMethod'); +exports.sec_expires = (0, exports.sec_prefix)('expires'); +exports.sec_revoked = (0, exports.sec_prefix)('revoked'); +exports.sec_created = (0, exports.sec_prefix)('created'); +exports.xsd_datetime = (0, exports.xsd_prefix)('dateTime'); +/** + * Generate a (separate) proof graph, per the DI spec. The signature is stored in + * [multibase format](https://www.w3.org/TR/vc-data-integrity/#multibase-0), using base64url encoding. + * + * @param hashValue - this is the value of the Dataset's canonical hash + * @param keyData + * @returns + */ +async function generateAProofGraph(report, hashValue, keyData) { + const cryptosuite = keyData?.cryptosuite || (0, crypto_utils_1.cryptosuiteId)(report, keyData); + // Create a proof graph. Just a boring set of quad generations... 
+ const createProofGraph = (proofValue) => { + const retval = new n3.Store(); + // Unique URL-s, for the time being as uuid-s + const proofGraphId = `urn:uuid:${(0, uuid_1.v4)()}`; + const proofGraph = namedNode(proofGraphId); + const verificationMethodId = `urn:uuid:${(0, uuid_1.v4)()}`; + const keyResource = namedNode(verificationMethodId); + retval.addQuads([ + quad(proofGraph, exports.rdf_type, exports.sec_di_proof), + quad(proofGraph, (0, exports.sec_prefix)('cryptosuite'), literal(cryptosuite)), + quad(proofGraph, exports.sec_verificationMethod, keyResource), + quad(proofGraph, exports.sec_proofValue, literal(proofValue)), + quad(proofGraph, exports.sec_created, literal((new Date()).toISOString(), exports.xsd_datetime)), + quad(proofGraph, exports.sec_proofPurpose, exports.sec_authenticationMethod), + quad(proofGraph, exports.sec_proofPurpose, exports.sec_assertionMethod), + quad(keyResource, exports.rdf_type, (0, exports.sec_prefix)('JsonWebKey')), + quad(keyResource, exports.sec_publicKeyJwk, literal(JSON.stringify(keyData.public), (0, exports.rdf_prefix)('JSON'))), + ]); + if (keyData.controller) + retval.add(quad(keyResource, (0, exports.sec_prefix)('controller'), namedNode(keyData.controller))); + if (keyData.expires) + retval.add(quad(keyResource, exports.sec_expires, literal(keyData.expires, exports.xsd_datetime))); + if (keyData.revoked) + retval.add(quad(keyResource, exports.sec_revoked, literal(keyData.revoked, exports.xsd_datetime))); + return retval; + }; + return createProofGraph(await (0, crypto_utils_1.sign)(report, hashValue, keyData.private)); +} +exports.generateAProofGraph = generateAProofGraph; +; +/** + * Check one proof graph, ie, whether the included signature corresponds to the hash value. + * + * The following checks are also made: + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) + * + * Errors are stored in the `report` structure. If any error occurs, the result is false. 
+ * + * @param report + * @param hash + * @param proof + * @returns + */ +async function verifyAProofGraph(report, hash, proof, proofId) { + const localErrors = []; + const localWarnings = []; + const getProofValue = (store) => { + // Retrieve the signature value per spec: + const proof_values = store.getQuads(null, exports.sec_proofValue, null, null); + if (proof_values.length === 0) { + localErrors.push(new types.Malformed_Proof_Error("No proof value")); + return null; + } + else if (proof_values.length > 1) { + localErrors.push(new types.Malformed_Proof_Error("Several proof values")); + } + return proof_values[0].object.value; + }; + const getPublicKey = (store) => { + // first see if the verificationMethod has been set properly + const verificationMethod = store.getQuads(null, exports.sec_verificationMethod, null, null); + if (verificationMethod.length === 0) { + localErrors.push(new types.Malformed_Proof_Error("No verification method")); + return null; + } + else if (verificationMethod.length > 1) { + localErrors.push(new types.Malformed_Proof_Error("Several verification methods")); + } + const publicKey = verificationMethod[0].object; + const keys = store.getQuads(publicKey, exports.sec_publicKeyJwk, null, null); + if (keys.length === 0) { + localErrors.push(new types.Invalid_Verification_Method(`No key values`)); + return null; + } + else if (keys.length > 1) { + localErrors.push(new types.Invalid_Verification_Method("More than one keys provided")); + } + // Check the creation/expiration/revocation dates, if any... + const now = new Date(); + const creationDates = store.getQuads(null, exports.sec_created, null, null); + for (const exp of creationDates) { + if ((new Date(exp.object.value)) > now) { + localWarnings.push(new types.Invalid_Verification_Method(`Proof was created in the future... ${exp.object.value}`)); + } + } + const expirationDates = store.getQuads(publicKey, exports.sec_expires, null, null); + for (const exp of expirationDates) { + if ((new Date(exp.object.value)) < now) { + localErrors.push(new types.Invalid_Verification_Method(`<${publicKey.value}> key expired on ${exp.object.value}`)); + return null; + } + } + const revocationDates = store.getQuads(publicKey, exports.sec_revoked, null, null); + for (const exp of revocationDates) { + if ((new Date(exp.object.value)) < now) { + localErrors.push(new types.Invalid_Verification_Method(`<${publicKey.value}> key was revoked on ${exp.object.value}`)); + return null; + } + } + try { + return JSON.parse(keys[0].object.value); + } + catch (e) { + // This happens if there is a JSON parse error with the key... 
+ localWarnings.push(new types.Malformed_Proof_Error(`Parsing error for JWK: ${e.message}`)); + return null; + } + }; + // Check the "proofPurpose" property value + const checkProofPurposes = (store) => { + const purposes = store.getQuads(null, exports.sec_proofPurpose, null, null); + if (purposes.length === 0) { + localErrors.push(new types.Invalid_Verification_Method("No proof purpose set")); + } + else { + const wrongPurposes = []; + for (const q of purposes) { + if (!(q.object.equals(exports.sec_authenticationMethod) || q.object.equals(exports.sec_assertionMethod))) { + wrongPurposes.push(`<${q.object.value}>`); + } + } + if (wrongPurposes.length > 0) { + localErrors.push(new types.Mismatched_Proof_Purpose(`Invalid proof purpose value(s): ${wrongPurposes.join(", ")}`)); + } + } + }; + // Retrieve necessary values with checks + checkProofPurposes(proof); + const publicKey = getPublicKey(proof); + const proofValue = getProofValue(proof); + // The final set of error/warning should be modified with the proof graph's ID, if applicable + if (proofId) { + localErrors.forEach((error) => { + error.detail = `${error.detail} (graph ID: <${proofId.value}>)`; + }); + localWarnings.forEach((warning) => { + warning.detail = `${warning.detail} (graph ID: <${proofId.value}>)`; + }); + } + report.errors = [...report.errors, ...localErrors]; + report.warnings = [...report.warnings, ...localWarnings]; + // Here we go with checking... + if (publicKey !== null && proofValue !== null) { + const check_results = await (0, crypto_utils_1.verify)(report, hash, proofValue, publicKey); + // the return value should nevertheless be false if there have been errors + return check_results ? localErrors.length === 0 : true; + } + else { + return false; + } +} +exports.verifyAProofGraph = verifyAProofGraph; diff --git a/dist/lib/errors.d.ts b/dist/lib/types.d.ts similarity index 54% rename from dist/lib/errors.d.ts rename to dist/lib/types.d.ts index ea4d615..53e3f3b 100644 --- a/dist/lib/errors.d.ts +++ b/dist/lib/types.d.ts @@ -1,10 +1,37 @@ +/** + * Common types and classes. + * + * @packageDocumentation + */ +import * as rdf from '@rdfjs/types'; +export declare enum Cryptosuites { + ecdsa = "ecdsa-2022", + rsa_pss = "rdfjs-di-rsa-pss", + rsa_ssa = "rdfjs-di-rss-ssa" +} +export interface VerificationResult extends Errors { + verified: boolean; + verifiedDocument: rdf.DatasetCore | null; +} +export interface KeyPair { + public: JsonWebKey; + private: JsonWebKey; +} +export interface KeyMetadata { + controller?: string; + expires?: string; + revoked?: string; + cryptosuite?: string; +} +export interface KeyData extends KeyMetadata, KeyPair { +} /***************************************************************************************** * Errors *****************************************************************************************/ /** * Superclass for the various error conditions. The entries are based on the DI specification. 
*/ -export declare abstract class ProblemDetail { +export declare abstract class ProblemDetail extends Error { /** The vocabulary URL for the entry */ type: string; /** The error code */ @@ -15,6 +42,10 @@ export declare abstract class ProblemDetail { detail: string; constructor(detail: string, title: string, code: number); } +export interface Errors { + warnings: ProblemDetail[]; + errors: ProblemDetail[]; +} export declare class Proof_Generation_Error extends ProblemDetail { constructor(detail: string); } @@ -27,3 +58,6 @@ export declare class Mismatched_Proof_Purpose extends ProblemDetail { export declare class Invalid_Verification_Method extends ProblemDetail { constructor(detail: string); } +export declare class Unclassified_Error extends ProblemDetail { + constructor(detail: string); +} diff --git a/dist/lib/errors.js b/dist/lib/types.js similarity index 68% rename from dist/lib/errors.js rename to dist/lib/types.js index 1076129..4daa9fe 100644 --- a/dist/lib/errors.js +++ b/dist/lib/types.js @@ -1,13 +1,24 @@ "use strict"; +/** + * Common types and classes. + * + * @packageDocumentation + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Unclassified_Error = exports.Invalid_Verification_Method = exports.Mismatched_Proof_Purpose = exports.Malformed_Proof_Error = exports.Proof_Generation_Error = exports.ProblemDetail = exports.Cryptosuites = void 0; +var Cryptosuites; +(function (Cryptosuites) { + Cryptosuites["ecdsa"] = "ecdsa-2022"; + Cryptosuites["rsa_pss"] = "rdfjs-di-rsa-pss"; + Cryptosuites["rsa_ssa"] = "rdfjs-di-rss-ssa"; +})(Cryptosuites || (exports.Cryptosuites = Cryptosuites = {})); /***************************************************************************************** * Errors *****************************************************************************************/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Invalid_Verification_Method = exports.Mismatched_Proof_Purpose = exports.Malformed_Proof_Error = exports.Proof_Generation_Error = exports.ProblemDetail = void 0; /** * Superclass for the various error conditions. The entries are based on the DI specification. */ -class ProblemDetail { +class ProblemDetail extends Error { /** The vocabulary URL for the entry */ type; /** The error code */ @@ -17,7 +28,7 @@ class ProblemDetail { /** More detailed description of the error condition */ detail; constructor(detail, title, code) { - // super(detail); + super(detail); this.detail = detail; this.title = title; this.code = code; @@ -49,3 +60,9 @@ class Invalid_Verification_Method extends ProblemDetail { } } exports.Invalid_Verification_Method = Invalid_Verification_Method; +class Unclassified_Error extends ProblemDetail { + constructor(detail) { + super(detail, 'Unclassified error', -100); + } +} +exports.Unclassified_Error = Unclassified_Error; diff --git a/dist/lib/utils.d.ts b/dist/lib/utils.d.ts index 19fb4cc..4763de0 100644 --- a/dist/lib/utils.d.ts +++ b/dist/lib/utils.d.ts @@ -1,9 +1,17 @@ /** - * Collection of smaller utilities needed for the DI implementation. Put into a separate file for an easier maintenance; not meant - * to be part of the external API + * Collection of smaller utilities needed for the DI implementation. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * They are not exported (via `index.ts`) to + * package users. 
+ * + * @packageDocumentation + * */ import * as rdf from '@rdfjs/types'; import * as n3 from 'n3'; +import { KeyMetadata } from './types'; /*************************************************************************************** * Namespace handling **************************************************************************************/ @@ -60,10 +68,12 @@ export declare class DatasetMap { */ export declare function isDatasetCore(obj: any): obj is rdf.DatasetCore; /** - * Text to array buffer, needed for crypto operations - * @param text + * Type guard to check if an object implements the KeyPair interface. + * + * @param obj + * @returns */ -export declare function textToArrayBuffer(text: string): ArrayBuffer; +export declare function isKeyData(obj: any): obj is KeyMetadata; /** * Calculate the canonical hash of a dataset using the implementation of RDFC 1.0. * @@ -71,24 +81,6 @@ export declare function textToArrayBuffer(text: string): ArrayBuffer; * @returns */ export declare function calculateDatasetHash(dataset: rdf.DatasetCore): Promise; -/** - * Convert an array buffer to a base64url value. - * - * (Created with the help of chatgpt...) - * - * @param arrayBuffer - * @returns - */ -export declare function arrayBufferToBase64Url(arrayBuffer: ArrayBuffer): string; -/** - * Convert a base64url value to an array buffer - * - * (Created with the help of chatgpt...) - * - * @param url - * @returns - */ -export declare function base64UrlToArrayBuffer(url: string): ArrayBuffer; /** * Convert the dataset into an n3.Store, unless it is already a store. * This is done to manage the various quads more efficiently. diff --git a/dist/lib/utils.js b/dist/lib/utils.js index 3170b31..208b2cc 100644 --- a/dist/lib/utils.js +++ b/dist/lib/utils.js @@ -1,14 +1,20 @@ "use strict"; /** - * Collection of smaller utilities needed for the DI implementation. Put into a separate file for an easier maintenance; not meant - * to be part of the external API + * Collection of smaller utilities needed for the DI implementation. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * They are not exported (via `index.ts`) to + * package users. + * + * @packageDocumentation + * */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.write_quads = exports.convertToStore = exports.base64UrlToArrayBuffer = exports.arrayBufferToBase64Url = exports.calculateDatasetHash = exports.textToArrayBuffer = exports.isDatasetCore = exports.DatasetMap = exports.createPrefix = void 0; +exports.write_quads = exports.convertToStore = exports.calculateDatasetHash = exports.isKeyData = exports.isDatasetCore = exports.DatasetMap = exports.createPrefix = void 0; const rdfjs_c14n_1 = require("rdfjs-c14n"); -const base64url_1 = require("base64url"); const n3 = require("n3"); -const { namedNode, literal, quad } = n3.DataFactory; +const { namedNode } = n3.DataFactory; /*************************************************************************************** * Namespace handling **************************************************************************************/ @@ -66,7 +72,10 @@ class DatasetMap { */ item(graph) { if (this.index.has(graph.value)) { - return this.index.get(graph.value).dataset; + // The '?' operator is to make deno happy. By virtue of the + // test we know that the value cannot be undefined, but + // the deno checker does not realize this... 
+ return this.index.get(graph.value)?.dataset; } else { const dataset = new n3.Store(); @@ -105,13 +114,15 @@ function isDatasetCore(obj) { } exports.isDatasetCore = isDatasetCore; /** - * Text to array buffer, needed for crypto operations - * @param text + * Type guard to check if an object implements the KeyPair interface. + * + * @param obj + * @returns */ -function textToArrayBuffer(text) { - return (new TextEncoder()).encode(text).buffer; +function isKeyData(obj) { + return obj.public !== undefined && obj.private !== undefined; } -exports.textToArrayBuffer = textToArrayBuffer; +exports.isKeyData = isKeyData; /** * Calculate the canonical hash of a dataset using the implementation of RDFC 1.0. * @@ -125,42 +136,6 @@ async function calculateDatasetHash(dataset) { return datasetHash; } exports.calculateDatasetHash = calculateDatasetHash; -/** - * Convert an array buffer to a base64url value. - * - * (Created with the help of chatgpt...) - * - * @param arrayBuffer - * @returns - */ -function arrayBufferToBase64Url(arrayBuffer) { - const bytes = new Uint8Array(arrayBuffer); - let binary = ""; - for (let i = 0; i < bytes.length; i++) { - binary += String.fromCharCode(bytes[i]); - } - const base64String = btoa(binary); - return base64url_1.default.fromBase64(base64String); -} -exports.arrayBufferToBase64Url = arrayBufferToBase64Url; -/** - * Convert a base64url value to an array buffer - * - * (Created with the help of chatgpt...) - * - * @param url - * @returns - */ -function base64UrlToArrayBuffer(url) { - const base64string = base64url_1.default.toBase64(url); - const binary = atob(base64string); - const byteArray = new Uint8Array(binary.length); - for (let i = 0; i < binary.length; i++) { - byteArray[i] = binary.charCodeAt(i); - } - return byteArray.buffer; -} -exports.base64UrlToArrayBuffer = base64UrlToArrayBuffer; /** * Create and store the values in a dataset in a new n3 Store. This may be * necessary because the methods are not supposed to modify the original @@ -189,8 +164,8 @@ function convertToStore(dataset) { } exports.convertToStore = convertToStore; /***************************************************************************************** - * This is only used for debugging!!!! - *****************************************************************************************/ + * This is only used for debugging!!!! 
+*****************************************************************************************/ const prefixes = { sec: "https://w3id.org/security#", rdf: "http://www.w3.org/1999/02/22-rdf-syntax-ns", diff --git a/docs/assets/navigation.js b/docs/assets/navigation.js index 4179917..b6e4ad1 100644 --- a/docs/assets/navigation.js +++ b/docs/assets/navigation.js @@ -1 +1 @@ -window.navigationData = "data:application/octet-stream;base64,H4sIAAAAAAAAE5WVQW/aQBCF/8v2itomdVHLLYGojSok1NBeImQN9lgedb1LZ8dpUMV/r4CA12DG6cmS973vzc7s2o9/jeCzmJEhl+OzGZgVSGlGpvJ5bTG8271+W0plzcD8Ipeb0fXAZCXZnNGZ0eMRMPauoBydEFiSdYNCV1cH0ImoDf60GRxpP5GpoAyEvPuOobbSAMkJcgHZsbxz8UnFH4cR+huuZ0Cs8F4UGmRyn96NJw83DSWzEMIRcVhvM66u4z3O2C8tVhMUIKu1/s2pck9Lrj4nH94nm0XEtLQ03fNBZs/hPMbSMt2vvWrMF4o+7D6itZQ9ffBF+gUd8m6E6d0W0MfusGghU7CF5wrzdO/tz+h0qBEUKpCsPDpmNa98QD3lgkkLundPYClP43OfTlFKn6tZiu88Lj5VtZC9cHR2S686ORkjCM4YC4o+NEXtsm0dMS1WttHDJKqKwgQEAsrYM+rEllRBbh9zf8MM69u6KJB17JlcQWdgs9qC4EslXyGUPW3ocCgB0JQx97cQcJj8YKtHdHuUkOVB8x9d6vZorfLuCVnm/kF6J9vWKtA/TILp7xryoBMjoYKbwmrsnaDr/jE1uEao/k/2I57CqvsG71mNquu+LjaLf7zlI0rQBwAA" \ No newline at end of file +window.navigationData = "data:application/octet-stream;base64,H4sIAAAAAAAAE52X227bMAyG38W7LbY2PWzNXZEUbRcECHboTVEYikXHQhXLk+Q0xtB3Hxw7sRTLlLLb5Of30xYp0i9/Iw1bHY0jllPYRmdRQXQWjaO1oCUH9WX38+dMr3l0Fr2xnEbj0VmUZIxTCXk0fjkAVpCDJBoWUoj0QZIi62hpmSeaiXzP62tti5urj7MDeAOSpVUI9liJQGG9BBrCPBJ687yv9TSMPRSBmMygmhJNsIP61GkawNXF7dXluUl5ro1ZQupkfoAquUaBTvkQewbVHDShAVkauiHaRFaFFqpkGhSKOxIO8faVN4MKxdk65FmnoAnjeG6WzGa9GjTOlpG7t5Ldw8WldjpxtoxNRVC7KrbKXZXZg9VCb9UHkRopwkq6M3yiQUgrAiE7jx3hGnqEesdXHU1Xhess7vjq6DzOb79eXI+sGt94MRO58WEeVebFPKrMh5npyouZ6cqLcbQGyzXIlCRO5EF/RL6+sZqkqG9JrBMMwUlz6w6/sY/RjjBvl5xo0QtCDBQkcSEhZdsQcqdGkJKmJyA7NYLcKnoCslN7sqyrtANuiGRk6SqHvdbGXY5671GINIR3EHuAlJ3G3OtD8nwmvAx6eDvChy6XnCUzqL6/vwXDjZiQzBelLIQ6Lfc2xoMnpc4g1+3GMgedCRpq44r12SkF8j+c7DCPycbYwU7z6Ud6rGBbMGmuWzi/lXugEjbiDYKTbuUeaCKB6HBoK0eg9ZVDiQbN1kGFaep7WHNs7Qape2Dt/goaVe5lGPJybZJMlY39FvgJcDSnG2w/oD+orfG/IEx6mK3KA+p/TgzB9koPcBoEm3pACymWHNbN1tLhEl43tsmyhDbxYmSeyb2UQg7vSw2tEXkSE2n80CwoTOTxLsSToSMCS3VOeCrkGmjcxHotnAGoA1NropPsENGbFw6TgRjM5ynfEM5obNZ3fHzD9q2QMMztd75DsZQB9b+0vrrPNm8ZZC0OX4ibW3KBrmn7r69OiaxoTNWtpEBPhAScaElRZK+N3ThnH9sfnYQnJScaWudHoga38/axHRGYgcg3IPUv8VN7n9/WItB3yTTEf0pCFU40hAhuToqJyDXkw1OgwXVC7AZqX8ycFO7qblidylXVrx+v/wBug2aanhQAAA==" \ No newline at end of file diff --git a/docs/assets/search.js b/docs/assets/search.js index 0a8d53b..f87c408 100644 --- a/docs/assets/search.js +++ b/docs/assets/search.js @@ -1 +1 @@ -window.searchData = 
"data:application/octet-stream;base64,H4sIAAAAAAAAE72cXW/buBKG/8qBcqtNzS/Jzl23LXaLRYHibM/eGIGhWHQiVLa8kpw2CPrfDyjR0ozN0ZeTXKWNZzgv5+GQNEXl2cuzH4V3s3z2vie72LvhvreLttq78ZJdrH96vnfIU+/G22bxIdXFu+q31w/lNvV8b51GRaEL78bzfvnHFuZNCx+y3SaJ9a5MojQpn5q29O6wPbZ0YuNo2ff2Ua53ZSOpDcWCJtb+cJcm68Ehrhr7jkinnu7IhV7nuhweubGfFpmrNvQ/Ok82yToqk2z3X10c0lZGsit1vonWDbJz25G5nnHZBH6sGtPx6HBXwLOr/46e9Uj5mK0PW9PEZEmghZeR9iPKd8nuvhgvCXi+jBSd51k+QUjjN1kGHK9/6aevUZLTMqzBBSPzZB7ojDBsEjiKpgLmyWNU6qERG+sLQq6zXZlnaaoHZvIKOVwQWP/cJ7nuGEYoamt9QchcP2bfuyYaFLK1HhmSt6vWx8+rTx8+/v2+CWmbsvGOH48bo4pxiK8o88O6zPIhMa6wfVfHGulEMldRep/lSfmwHRQYmbdxyyL+LSl+2+dZqdeljv9jf5HsHnSelFX6xwpb50/7MisOCaikTmnY4XXFPUTFwzBV1vKVc3XIH4dmyZq+rqAcbz06FTW2Ly2Jz+Qc7F2Tsl6JikGysP3rStvus7z8Sz8NEwasX1XWvd7pPCr1+695lm3+yKP9sCHv9ntVqdV+7WmsUJfXm2R0YkK7hV6cvknZezVNenun45GSzn1ePkufTIx4tLQO50s1NgLT5O7dya7++B09Te5W9UedX9Thpudrnt2levtRl1GSnvURNIgMu/dAQOH4jVBnyEFbojY8boFaxsqn/fmq2i3DurxM/HUWj41vXV6o/0mZjk6A9XkZBfGE0XfVOE3XgOsg26z+qOfgJNutPhnfHkkOjzerDCr2hBJxdXx6rZDCzoqmayKcrHBANXWkLn4DhUPqjU7iWeG9isZBFUmKdJTmi6kENfslSjdZvtXxqnbrLVmnw9tULB16bMG6ez2xXjtkTS3XUfr6qrUzbdOKdVz++mq1K4FTS3WUwv5K7ZA4vVB7NcI6TYptVK4fGuOvh3yfFd15JXzeqFq7oo8uWKr7U2u2U9zksh2psrdye1I4sXjH5rK3fruTObmER+ocUMWdQi8o5AFKQS1/3j1GaRKv4KOf1RddPmRxl/oOt7ep6D4BY4u6Kw8T67pX4tTSnqC1r7oHpHNagU/Ja1+N9yd2aplPUNtf6b1ypxf7ML34rOlQJqn7qKn6pPOkKQAjKtfV4abeJO3lks1htzYaYHvQsH9uqNWBiEqJ9rnzaoWKblC4K/SbpoUeCbgd1FlnOpLiY1RGhS4/ZHm3QGQ5ISGgUPTP8lv2Ps+jp98Pmw14iuyKe2Z9Uex1lK4PaVRq25k/4aM1JxeHw0UKorYn37Lfo0IH8n952qnB7XKRirtjK8NBuF0uo5HtHnVefsv+LvuGHza9KOoPMzWt/j1EcdEZEtiNjwevnXyJ9h+yXUlcDmoDtnbj46HJPXHeVyADXSX0jYV2ZgG9oNaUukTGxW6dpgmANyfqpr5Ee+e6VrfSGo1P8pgNnzPa4O3dqTf5NLfU53cp6PDW/PK4D9H5g206bG19eVQ7VsaEBi4vE39k7KlxJVtIMWtry/2A7PxW7NWpYef9oFvf/uvm2TxxLJJs5914/FpcLzzf2yQ6jc3l3FqE762zbXVB8dZ+9o82A9lY1CbvZp6/nPlydj1j4vbWXx49qg+qX1RmzPOXzGXGkBn3/CX3hbxezCQy48hMeP5SuFoTyEx6/lK6zCQyU56/VC4zhcwCz18GLrMAmYWevwxdZiEym3v+cu4ymyOzhecvFy6zBU7vjEocO+FQgXACYxgFMylnbmaYBjNZZ9xpiYEwk3jmJMcwE2Zyz5zwGMbCTPqZ8rm4XrA5tsRkmCHAnAgZhsMMBOakyDAfZjgwJ0iGEXEDgjlZcsyIGxDcyYiflEtVL05GHDPiBgR3MuKYETcguJMRx4y4AcGdjDhmxA0I7qwxjhlxA4I7GXHMiBsQ3MmIY0bcgOBORhwzElUZORkJzEgYEGLmqjiBGQlOjk9xMqtV0xrzeXg949gQIxKGg+AuQ0xIGAxCuAwxIGEoCOkyxHyEgSCcJAXmI+Z0vzEfsSD7jfHIGdVvielIRvVbYjiSU/2WmI2s2DjHpTxZdCTZb4npSEX1W2I6MiD7jenIkOw3hiPnZL8xG1mxcVaZxHDUjOy3wnQUo/qtMB3FqX4rTEcJqt8Kw1GS6rc62RNUbJxzhsJwFL3+KExHhWS/MR01J/uN6agF2W8MJ5hR/Q4wm6Bi45wBAwwnMAikc5UKMJ3AMJDOVSrAeIJqx+ZcpQLMJzAQpHOVCk62bQaCdK5SAeYTGArSObcFGFBgMEj3ZhATCgwH6ayfACMKDQjpHHEhZhQaENLJKMSMQgNCORmFmFFoQCgnoxAzCg0I5WQUYkahIisjxIxCA0I5aYYnu2sDQjlphphRaEAoJ80QMwoNCOWkGWJG8xm56s9rRtXXK3NspePP9des5bI5DH72Vva7l2THr3nPXiC8m+dfv9rvWuZ/pmn4MkTr2r4H+OyxkHJFLysA5xA4z0nn+g4/cFu0bnxGudWvIgCvOQi2oLyO1/NbPz4D0RjhV52Q3lUHkmVWH1SaL8gwxwrkmEpU41lmoEXUDMh3QKWsOTO2hw+nqZASNBJQjVRPrlonwVsnIY2Z78mZ/RnUPxW3P0llp2/EAj4AD+3dHl4BT5BaZpUIq0TMrUKrWFnFytqFio7VvDIGQkGVnHY1NVdmRX2qDBIPBnxAjcH6ocnePiECzmAgBlTs+sindVJgvIRUxOYIFPiBjobUlGD9tua8E7iCgRLKbtcCOQKOITWCjg/9wMAEo1lYrNLilxa/svgDarqIk5VexwXKHQMNM2qcVLfw9+bGwH19wx3MHIA2p/pzvMLeuoF5inQ6vlkIxIKsMwrY8d2KiBAsgGCK3LENognAkFNTS3UcC7iDPIfU1GgeEQAXUAshVQv1u0PfNZpmOMgTp/JkX/1vvUA4agSZV6jy4ytUIB4oJE4qtc+jH+Hz6K29TQJGOhgaihqQSWFra306+YCeBxTd+oAeJBoMiZDi+V0/7avXmls/sERTsw58gQPkC3gKKtfgeTzIDvAMqCVk29zXq8ZvfdcAtQImTEmNj220Xx8fqIEEAwEhKaC9Z1Qr2B8vAQINYOKQVPqaN6nBHACGKaME7Ovzecc8CrwFq+fLOUWg1n7fXlA+TySYCQRV1ccX0AF/UC61BkZpaF52BhkAw5VRY/z4ByiAViCVcDLXAOhdGWggoArT3q4BUWFYZRcsm3gZ2oVL2J/UMDjdywsw3wjrLBa2URtE2SCKwgLnofMdMZiuqRS3f+UCTCVg8upxi5u/RNG6w41vl/sTtbgB2bxTwJO2L64RDYFZmFNY6oaIBkBXOIWg/QsYrSOYFkg3eLcAI
AOaQ2c93frePtnrNNlp72Z5++vX/wHgSYUxGkcAAA=="; \ No newline at end of file +window.searchData = "data:application/octet-stream;base64,H4sIAAAAAAAAE71dW2/cxg7+L/Kr6mpGd78VcdH25AQIzqUvi8BQdse2jndXeyStEyPIfy9GGkmkltRt1Ty5tcn5SH4k5yJp8s3Ksy+Fdbf5Zr2kx511J23rmByUdWelx536atnWOd9bd9Yh2533qvi5+u3tc3nYW7a13SdFoQrrzrK+280IgdcO8aSOKk9K9THPssff8uT03I73eD5uyzQ7NiNeihIgtnVKcnUsW+tI1FeVp49vEzD7gosR1eGz2k0A7Mld6eGverTdJGBOYZ4FXW7s088/b/O3U5k9nMt0X1ykyT79/AAFpmZMkT4dCS8uhtNyw8ZfmDgQySmIteQamLVEcU5L9cduCjRSWMOCptjeq0muA/HF6NKJQ+F3GfTL/qmFLt9OVMr8sn9aDe5d/joG9y5/XQ3u9+J5DO73YqT6ZsC9L9/G4N6XV3DnBx2UertXZQJrPj2WKn9MtiRsK74YXTiyy1z9Y/funL+qJfA3SH2aJWAMzqjnBNA9xxyjuJohVfc9F/9Ux6dymUX9Ea4xDU8XJz3pDMwW4O9zlxe/DM6A/cEJrfHchNYPTCTzLLnQWcGOQm0fTrl6TL9OMKATXgE53z1OR+6EV0D+WuymI3fCS5FdiXzWDbfFfU3yNPlM5XQjugJqTVuWPU6AbWVXwt2ls6Ab8TW9/jPZn6dEHCusZcH58z7dvldv//jyMtUGoLJmHD6e81NWzIqEUVnJiuRcPqtjmW4TXWofVPmc7SZaQ6muZVVRqHy+QVhrJVuqDr8kPpeKK1mkvp7SXBUTzTDSK2Hn6jV7UVNDYKRXwt7mKiknYxvpFbD1dLNLSlWmhymVCsWXouN1V7UfIFdc1V8G11oR2D+1W9BuMHU8H+BQUGjc+NowsJLtNhlquyuS6TA3jfwIGNKkkfMieTgVM1y86TTWQS9med5pLESHe7s/QdP5lyrO+5LbQ9RDXsovYB3uX+quB6p0DuwN0B4LBuHpiEn32fZ80MNcZRoYZT0TvyT5MT0+sRvyYdOAdmdSWex+Souf0uOzytO6bV5losrzLF9oYKu7qnm9M42PSZoPm2eErszwei04HemmVRjLl8YJDjhPX5OSPTWhkFuN+dC96H5QZbJLymQUvRG8Msrb7Fjm2X6vxjlFiDdIcYLbrWNc3vdWW9Os6LRWMKG/6JpmQqe1ggng/HouH0hzmSn908speXj/Q3Pwfiz/pvW7xrkrc/F+MA9XMWVqTt4P5uMqpszJzfvRvFzFpIlzRGcNMUesY8jEOQNYQswZi02R3fbjY5593qtDfcbc2mMwgDFIbn4B+0LC+i3K/Lwts3wW4A1WHOtZ2DOGCHTEOMkIo7EK+jbbzUQ3Guv4npb7uc4blVXwd/Nz7qbVWWoBnLN+nbB2rmWunLGm7SMAFL13oD01XlyxP4DAxJ5gKizuKtnjw2/186A0Oz5U0sNUEwo/qM9wyPMbDuX04s7DmnXRgqZNBPPsG+9NA2Hb/f32TehefAAv2tjfYeGU/saaSDS6tWwEdfoh2T9m+UHtHmq1sTIl5X9ElfLAM4uU9nhZjQ4YtbBEZ1k3UqGDIVtUoPNiN1KfQ8FbWJ6z7ButzgEDFxfnqIWwNtPikJTb51a4/1SSMJlR+SEVOoQ9t0g51xfW6aBpS0t1po1j1ToSvmUFOzeOYzU7HMilZTvTyvHKHTRzefFOsBPU7x/H12Sf7h7gYflD71H1pe0DWj+iisfgZxbyUAyW1fKogQvLeYGlIxU9IZSLinpJTEfqejyoC0t7ga2j1T1q7OICn2gtqPH/HiuM6gnk6OL5UvhHVDSDOrOQCUeX1S9nzsKynW7XSLXyYVpUpDPiNVKbbMAWluR0y0YrkTNtcQEO2oZfvuFfd571onP9OtLHoVdcmy9FOsHxsr14pdb33e7c8QG/2ToJ7gb9ph1hxAQ8DnKWDEda6EcFhSrfZfmwgUhyQUAgZv/ZIY03+eHh0AdCyX573ielMsb/Dr8vIHkgFK6zIDu+qrz8T/bvcizEWPQq1C+66B7+f052xSAkkJuPB4/WPySnd9mxHHitpwbs5ObjoWaVss88SaCbdPgJfF+J7ZF1WszD7pSWGQAWICYtPyQnsk/Xo3RC84M8Z8FBok1eaPS10VdZXudyWqrDDHgjfj3uc1LMgK2lr0c1uTIHGqisgz8TeymuJ2LPdbra6k8Llx9K33QiNGL/O9s+xMB7mQQaKb0MmHp1i/YPiC2DIl8wJrB6csvAqO9fCSwstjiE/e8lmQxppQaBPtnmv+6+6XdTizQ7WneWvHVvY8u2HlO13+nP+Wt429pmh+p900/mb38q3eG0RC3ys2PZG8f2vFspo0+f7E2jUf2h+kUlJix7IygxgcSkZW8kJSaRmGvZG5cSc5GYZ9kbjxLzkJhv2RufEvORWGDZm4ASC5BYaNmbkBILkVhk2ZuIEouQWGzZm9h241shkVSMo6uDLUgaRI+HigiaCUyF0CEXJBkCsyF01AXJh8CECB144RH+CEyJ0KEXJCkCsyJ09AXJi8DECE2AIKkRmBuhORAkOwLTIzQPIiYlMUVS8yBJiiSmSGoeJEmR7FVLVS50vWCKpOZBkhRJTJHUREiyaiTmSGoiJMmRxBxJTYQkOZKYI6mJkCRHEnMkNRGS5EhijqQmQpIcScyRq4lwSY5czJGriXBJjlzMkauJcEmO3F5Tq7oa3dYwR64mwiU5cjFHribCJTlyMUeuJsIlOXIxR64mwiU5cjFHribCjYhydzFFrubBJSlyMUWe5sEjKfIwRZ7mwSMp8jBFnubBk4SZHmbI0zR4JENeb+apph567sEMeZoGz6fAMUGeZsELbNe/Df0AS2KCPM2CRxLkYYI8TYNHEeRhgjzNgkdNRB7mx9ck+A4h6GN6fM2BLyhBzI6vOfApdnzMjq8p8F1KEJPjawZ8ahbyewuDamVAceNjbvyAdQZT44esM5gZP2Kdwcz4MesMZiZwOK4DzEwgOK4DzExQMUMviTA1QUVNaMvoVrgelsTcBBU3kS3jWz/wsSQmJ6jIiUnJ3sKtWrk5pCSmJ9AkBIKUxPwEEVuNASYoiLkCDzBBocMOGWKGQs1DQM4pIaYolGzgQ0xR6LKBDzFFoccGPsQUhT4b+BBTFAZs4MPe8rqiiOzBIaYojHjfMUVhzPuOOYoc1vcIcxQJ1vcIcxRJ1vcIcxRpIgJyVokwR5HH+h5hjiKf9T3CHEUB7zvmKAp533u7oIj3HXMUaSICciUTYY5ih/U9xhzFgvU9xhzFkvU9xhzFLut7jDmKPdb3GHMUV3VENtoYcxQHvO+YozjkfcccxRHve2+zGvO+9zesDut8/TcoW5USvXFzeptWpyomeuvm9LatTkUVvXlzehtXRzMSMjvs3t7V0ZyE9B7b6e1eHc1KSO+ynd
7+1amOFuh9ttPbwTqamZBsFfXfoKzmJqQ3206Pt+o4IaS32xdHDZqbkOatf9hQHSmENG/94wbBryZE/8ChOlcIaY77Zw7VyUJEc9w/dajOFiLmHKXHW3W6ENEc908eqvMFcskp+kcP1QEDuW0RvbMHIdlVuegdPojqiIHcswnZPyCSzOGU6J0+iOqMgT73MccP1bGkfg6odn/Ux5ObTfsE+Zv1YM4sg7g5GP1m6Rq9+/b9e3dIqf9PD57ouwE7JeFAJUanfQRqnivUV6t1g4QSDhJwo1TvUXRaPrA38LWYbYVe/TNyzM+g/hlL8zNiBwePmABGCDBcg2HGCiODYTBjgxkbTF0vLFj7JSnAEh2WL+oxfD4YFaNlVtQPgUEwXRjMkNOvHuQ3F3OBJIigtmS1X1ESAMuFYHXAZ5nAaw94bSLpx+Nj6Kev3SjAapZh9HCiU3WBrmso1eVIj1I/RwHAMHUFF+32cS2gKYSKQ3CFKg/64SzQhSQJjqTuIR4wFzHFJVfz9g1IC6AXhCb/TZ5Hph4iE7zYsKjXGvT45koYwAEoZZcjv7qgt7pV56m+ww/oA3VO23yV1il5QaflGV8C40PImt58dg0yGKSAb2Lhc5nQPLZKaEcE7MFcIJoxXvSjr04XqMZNGnO50QzBWAGMYAaoHk6DvILTgPB5JYwDGGCT8bmnAwuOcw83hxDCsJOUuVK7UwMu8Tr1657wRrCHg3lBG1SPD9oTx2pamILd9hs6mmI9Vv1FvfW7UwiZdDha6vcbgBZMQsEBEnA+8NP3m1mQa8h6gOa5KuAXzAei6SSSq+oX9XZon24DQ0DIfKcZhGPxRb2dqitmQG8A3dnjavmlRPUn4MTLGXx5OTewGhg9oI4uawXwcFLgYg4uHQPdEzjrcs6CNyZBYsNJzOF6xaH9ZKk2XdVvOYNhQORCzoBDcto274OBXPWhBVxtHbrvLmoTTs2HUMAIkHgRR5+5gndvruAF0YeR4FzQArttfcMxUIUucMS3lzeAHAVl6pmOH3AcnOqP2S9ndh80x4Drp3XQnrovQi8pBG05ZI0wd2EAH0DSembuDbg6hZfYdkNIgMzOd91dsEAREC65LtfebQIiBrLVN3sCnyu49gY64DNoTh7ra3N3HNADzdzjQoyuy7yciVwwhjs4BroDlBgIOOFyTqC7HYEuSHiXS3h8ny1gDeSr5NYM6DJNAAzXiVyJw0uaASwIHLs5AHf/Ak3grRzyttIkGpMEdS65DtcO8FpfwAvUQWeSXGfqrsJ9UW//07fnggFAjUouz9EVoiDioMZcrsb6d6oS2QZq3OVqvP7nKUBLAo2NUTFfQAAdkNaB2ROEZiUTmT15ZPY/san9mOOk33B8EMjALG3C5lDBNPDIgMUGLOYYO8MPHS7bMXA+5pIOxjw3LyOCdgNi7slmMcf52t3vCEYA5eZx5dYo7trbF8EAIHs8LnuafyUErAnAVDSoxGzCJEgCdtFYj6DMv9pCDwTNHxyGsQOYwah3l7wAVNBwPJNOAccbeq0fhBCuwh2u6PH9uaBcQfa5XO3Bi+GBzyBn6FX/J9s6pSe1T4/Kutt8+v79L/judEInaQAA"; \ No newline at end of file diff --git a/docs/classes/index.DI_ECDSA.html b/docs/classes/index.DI_ECDSA.html deleted file mode 100644 index 3a5448c..0000000 --- a/docs/classes/index.DI_ECDSA.html +++ /dev/null @@ -1,62 +0,0 @@ -DI_ECDSA | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Real instantiation of a DI cryptosuite: ecdsa-2022.

-

Hierarchy

  • DataIntegrity
    • DI_ECDSA

Constructors

Properties

_algorithm: string
_cryptosuite: string
_hash: string
_curve: string

Methods

  • Generate a (separate) proof graph (or graphs), per the DI spec. The signature is stored in multibase format, using base64url encoding.

    -

    This is just a wrapper around generateAProofGraph to take care of multiple key pairs.

    -

    Parameters

    • dataset: DatasetCore<Quad, Quad>
    • keyPair: Iterable<KeyPair>

    Returns Promise<DatasetCore<Quad, Quad>[]>

    Throws

      -
    • an error if there was a key issue while signing.
    • -
    -
  • Parameters

    • dataset: DatasetCore<Quad, Quad>
    • keyPair: KeyPair

    Returns Promise<DatasetCore<Quad, Quad>>

  • Verify the separate proof graph.

    -

    For now, this method just does the minimum as a proof of concept. A more elaborate version will have to verify all details of the proof graph.

    -

    Parameters

    • dataset: DatasetCore<Quad, Quad>
    • proofGraph: DatasetCore<Quad, Quad>

    Returns Promise<boolean>

  • Parameters

    • dataset: DatasetCore<Quad, Quad>
    • proofGraph: DatasetCore<Quad, Quad>[]

    Returns Promise<boolean[]>

  • Create a new dataset with a copy of the original and the proof graph as a separate graph within the dataset.

    -

    The separate quad with the proof property is added; if the anchor is properly defined, then that will be the subject, otherwise a new blank node is used. (The latter may be meaningless, but makes it easier to find the proof graph for verification.)

    -

    If the keyPair argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise (e.g., if it is a Set), it is a Proof Set.

    -

    Just wrapper around generateProofGraph.

    -

    Parameters

    • dataset: DatasetCore<Quad, Quad>
    • keyPair: KeyPair | Iterable<KeyPair>
    • Optional anchor: Quad_Subject

    Returns Promise<DatasetCore<Quad, Quad>>

  • Verify the dataset with embedded proof graphs. The individual proof graphs are identified by the presence of a type relationship to DataIntegrityProof; the result is the conjunction of the validation results for each proof graph separately.

    -

    The following checks are also made and, possibly, exceptions are raised with errors according to the DI standard:

    1. There should be exactly one proof value
    2. There should be exactly one verification method, which should be a separate resource containing the key
    3. The key's possible expiration and revocation dates are checked and compared to the current time which should be "before"
    4. The proof's creation date must be before the current time
    5. The proof purpose(s) must be set, and the values are either authentication or verification

    Parameters

    • dataset: DatasetCore<Quad, Quad>

    Returns Promise<VerificationResult>

  • Returns void

  • Import a JWK encoded key into a key usable by crypto.subtle.

    -

    Parameters

    • key: JsonWebKey

      the key itself

      -
    • type: Confidentiality

      whether this is a private or public key (usable to sign or verify, respectively)

      -

    Returns Promise<CryptoKey>

  • Generate a (separate) proof graph, per the DI spec. The signature is stored in multibase format, using base64url encoding.

    -

    Parameters

    • hashValue: string

      this is the value of the Dataset's canonical hash

      -
    • keyPair: KeyPair

    Returns Promise<DatasetCore<Quad, Quad>>

  • Check one proof graph, ie, whether the included signature corresponds to the hash value.

    -

    The following checks are also made and, possibly, exceptions are raised with errors according to the DI standard:

    1. There should be exactly one proof value
    2. There should be exactly one verification method, which should be a separate resource containing the key
    3. The key's possible expiration and revocation dates are checked and compared to the current time which should be "before"
    4. The proof's creation date must be before the current time
    5. The proof purpose(s) must be set, and the values are either authentication or verification

    Parameters

    • hash: string
    • proof: Store<Quad, Quad, Quad, Quad>
    • Optional proofId: Quad_Graph

    Returns Promise<boolean>

\ No newline at end of file diff --git a/docs/classes/lib_errors.Invalid_Verification_Method.html b/docs/classes/lib_errors.Invalid_Verification_Method.html deleted file mode 100644 index 3242e16..0000000 --- a/docs/classes/lib_errors.Invalid_Verification_Method.html +++ /dev/null @@ -1,11 +0,0 @@ -Invalid_Verification_Method | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

-

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

-
code: number

The error code

-
title: string

Title (essentially the error type name)

-
detail: string

More detailed description of the error condition

-
\ No newline at end of file diff --git a/docs/classes/lib_errors.Malformed_Proof_Error.html b/docs/classes/lib_errors.Malformed_Proof_Error.html deleted file mode 100644 index 3da65d7..0000000 --- a/docs/classes/lib_errors.Malformed_Proof_Error.html +++ /dev/null @@ -1,11 +0,0 @@ -Malformed_Proof_Error | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

-

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

-
code: number

The error code

-
title: string

Title (essentially the error type name)

-
detail: string

More detailed description of the error condition

-
\ No newline at end of file diff --git a/docs/classes/lib_errors.Mismatched_Proof_Purpose.html b/docs/classes/lib_errors.Mismatched_Proof_Purpose.html deleted file mode 100644 index 245ba75..0000000 --- a/docs/classes/lib_errors.Mismatched_Proof_Purpose.html +++ /dev/null @@ -1,11 +0,0 @@ -Mismatched_Proof_Purpose | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

-

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

-
code: number

The error code

-
title: string

Title (essentially the error type name)

-
detail: string

More detailed description of the error condition

-
\ No newline at end of file diff --git a/docs/classes/lib_errors.ProblemDetail.html b/docs/classes/lib_errors.ProblemDetail.html deleted file mode 100644 index e9c8113..0000000 --- a/docs/classes/lib_errors.ProblemDetail.html +++ /dev/null @@ -1,11 +0,0 @@ -ProblemDetail | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

-

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

-
code: number

The error code

-
title: string

Title (essentially the error type name)

-
detail: string

More detailed description of the error condition

-
\ No newline at end of file diff --git a/docs/classes/lib_errors.Proof_Generation_Error.html b/docs/classes/lib_errors.Proof_Generation_Error.html deleted file mode 100644 index 5e2eee6..0000000 --- a/docs/classes/lib_errors.Proof_Generation_Error.html +++ /dev/null @@ -1,11 +0,0 @@ -Proof_Generation_Error | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

-

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

-
code: number

The error code

-
title: string

Title (essentially the error type name)

-
detail: string

More detailed description of the error condition

-
\ No newline at end of file diff --git a/docs/classes/lib_types.Invalid_Verification_Method.html b/docs/classes/lib_types.Invalid_Verification_Method.html new file mode 100644 index 0000000..37d03ce --- /dev/null +++ b/docs/classes/lib_types.Invalid_Verification_Method.html @@ -0,0 +1,11 @@ +Invalid_Verification_Method | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

+

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

+
code: number

The error code

+
title: string

Title (essentially the error type name)

+
detail: string

More detailed description of the error condition

+
\ No newline at end of file diff --git a/docs/classes/lib_types.Malformed_Proof_Error.html b/docs/classes/lib_types.Malformed_Proof_Error.html new file mode 100644 index 0000000..f3dc092 --- /dev/null +++ b/docs/classes/lib_types.Malformed_Proof_Error.html @@ -0,0 +1,11 @@ +Malformed_Proof_Error | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

+

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

+
code: number

The error code

+
title: string

Title (essentially the error type name)

+
detail: string

More detailed description of the error condition

+
\ No newline at end of file diff --git a/docs/classes/lib_types.Mismatched_Proof_Purpose.html b/docs/classes/lib_types.Mismatched_Proof_Purpose.html new file mode 100644 index 0000000..03f70d4 --- /dev/null +++ b/docs/classes/lib_types.Mismatched_Proof_Purpose.html @@ -0,0 +1,11 @@ +Mismatched_Proof_Purpose | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

+

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

+
code: number

The error code

+
title: string

Title (essentially the error type name)

+
detail: string

More detailed description of the error condition

+
\ No newline at end of file diff --git a/docs/classes/lib_types.ProblemDetail.html b/docs/classes/lib_types.ProblemDetail.html new file mode 100644 index 0000000..68ff0cc --- /dev/null +++ b/docs/classes/lib_types.ProblemDetail.html @@ -0,0 +1,11 @@ +ProblemDetail | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

+

Hierarchy (view full)

Constructors

Properties

Constructors

  • Parameters

    • detail: string
    • title: string
    • code: number

    Returns ProblemDetail

Properties

type: string

The vocabulary URL for the entry

+
code: number

The error code

+
title: string

Title (essentially the error type name)

+
detail: string

More detailed description of the error condition
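
A hedged sketch of how a report built from these ProblemDetail entries could be inspected; the Errors shape follows lib/types, while the relative import path and the helper function are assumptions:

```typescript
// Hypothetical sketch; the relative import path is an assumption.
import { Errors } from './lib/types';

// The report object is threaded through the signing/verification functions,
// which push ProblemDetail instances into it instead of throwing exceptions.
const report: Errors = { errors: [], warnings: [] };

function printReport(r: Errors): void {
    for (const problem of [...r.errors, ...r.warnings]) {
        // Each entry carries the DI "problem detail" fields.
        console.log(`${problem.title} [${problem.code}] ${problem.detail} (${problem.type})`);
    }
}
printReport(report);
```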

+
\ No newline at end of file diff --git a/docs/classes/lib_types.Proof_Generation_Error.html b/docs/classes/lib_types.Proof_Generation_Error.html new file mode 100644 index 0000000..93d1b7e --- /dev/null +++ b/docs/classes/lib_types.Proof_Generation_Error.html @@ -0,0 +1,11 @@ +Proof_Generation_Error | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

+

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

+
code: number

The error code

+
title: string

Title (essentially the error type name)

+
detail: string

More detailed description of the error condition

+
\ No newline at end of file diff --git a/docs/classes/lib_types.Unclassified_Error.html b/docs/classes/lib_types.Unclassified_Error.html new file mode 100644 index 0000000..c8dfc69 --- /dev/null +++ b/docs/classes/lib_types.Unclassified_Error.html @@ -0,0 +1,11 @@ +Unclassified_Error | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Superclass for the various error conditions. The entries are based on the DI specification.

+

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

type: string

The vocabulary URL for the entry

+
code: number

The error code

+
title: string

Title (essentially the error type name)

+
detail: string

More detailed description of the error condition

+
\ No newline at end of file diff --git a/docs/classes/lib_utils.DatasetMap.html b/docs/classes/lib_utils.DatasetMap.html index 13218c4..ca6d994 100644 --- a/docs/classes/lib_utils.DatasetMap.html +++ b/docs/classes/lib_utils.DatasetMap.html @@ -1,11 +1,11 @@ DatasetMap | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

A shell around a Map, which is indexed by the value of rdf Terms.

(At the moment, the map value is a structure that also includes the original term; this may become unnecessary in the long term.)

-

Constructors

Constructors

Methods

Constructors

Methods

  • Create a new dataset, if needed, otherwise returns the +

Constructors

Methods

  • Create a new dataset, if needed, otherwise returns the dataset already stored.

    -

    Parameters

    • graph: Quad_Graph

    Returns Store<Quad, Quad, Quad, Quad>

  • Parameters

    • graph: Term

    Returns boolean

  • Returns Store<Quad, Quad, Quad, Quad>[]

\ No newline at end of file +

Parameters

  • graph: Quad_Graph

Returns Store<Quad, Quad, Quad, Quad>

  • Parameters

    • graph: Term

    Returns boolean

  • Returns Store<Quad, Quad, Quad, Quad>[]

\ No newline at end of file diff --git a/docs/enums/index.Confidentiality.html b/docs/enums/index.Confidentiality.html deleted file mode 100644 index f4edbe0..0000000 --- a/docs/enums/index.Confidentiality.html +++ /dev/null @@ -1,4 +0,0 @@ -Confidentiality | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/enums/lib_types.Cryptosuites.html b/docs/enums/lib_types.Cryptosuites.html new file mode 100644 index 0000000..1c62d72 --- /dev/null +++ b/docs/enums/lib_types.Cryptosuites.html @@ -0,0 +1,4 @@ +Cryptosuites | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/index.embedProofGraph.html b/docs/functions/index.embedProofGraph.html new file mode 100644 index 0000000..fa2e294 --- /dev/null +++ b/docs/functions/index.embedProofGraph.html @@ -0,0 +1,6 @@ +embedProofGraph | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
  • Create a new dataset with a copy of the original and the proof graph(s) as separate graph(s) within the dataset (a.k.a. "Embedded Proof" in the DI spec terminology).

    +

    If the anchor is defined, then it will be the subject of the quads that are added with the proof property (one for each proof graph).

    +

    If the keyData argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise (e.g., if it is a Set), it is a Proof Set.

    +

    Parameters

    • dataset: DatasetCore<Quad, Quad>
    • keyData: KeyData | Iterable<KeyData>
    • Optional anchor: Quad_Subject

    Returns Promise<rdf.DatasetCore>
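
A minimal usage sketch, assuming embedProofGraph, generateKey, Cryptosuites, and the KeyData type are (re-)exported via the package's index module; the import path, sample triple, and anchor IRI are illustrative only:

```typescript
// Hypothetical usage sketch; the import path, sample triple, and anchor IRI are assumptions.
import * as n3 from 'n3';
import { embedProofGraph, generateKey, Cryptosuites, KeyData } from './index';

async function main(): Promise<void> {
    // An n3.Store implements rdf.DatasetCore, so it can be handed to the API directly.
    const dataset = new n3.Store(
        new n3.Parser().parse('<urn:example:a> <urn:example:b> <urn:example:c> .')
    );
    // Generate an ECDSA key pair; the result is assumed to be usable as KeyData.
    const keyData = await generateKey(Cryptosuites.ecdsa) as KeyData;
    // Embed the proof graph; the optional anchor becomes the subject of the added proof quad.
    const anchor = n3.DataFactory.namedNode('urn:example:a');
    const signed = await embedProofGraph(dataset, keyData, anchor);
    console.log(`Signed dataset contains ${signed.size} quads.`);
}
main();
```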

\ No newline at end of file diff --git a/docs/functions/index.generateProofGraph.html b/docs/functions/index.generateProofGraph.html new file mode 100644 index 0000000..96a606e --- /dev/null +++ b/docs/functions/index.generateProofGraph.html @@ -0,0 +1,6 @@ +generateProofGraph | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
  • Generate a (separate) proof graph (or graphs), per the DI spec. The signature is stored in multibase format, using base64url encoding. Keys are accepted and stored in JWK format.

    +

    Parameters

    • dataset: DatasetCore<Quad, Quad>
    • keyData: Iterable<KeyData>

    Returns Promise<rdf.DatasetCore[]>

    Throws

    • an error if there was a key issue while signing.
  • Parameters

    • dataset: DatasetCore<Quad, Quad>
    • keyData: KeyData

    Returns Promise<rdf.DatasetCore>
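
    A sketch of the two overloads documented above, under the same assumption that the package's top-level index is importable as rdfjs-di; the declare statements stand in for a real dataset and real keys.

```typescript
import * as rdf from '@rdfjs/types';
import { generateProofGraph, KeyData } from 'rdfjs-di';   // assumed entry point

declare const dataset: rdf.DatasetCore;   // the dataset to be signed
declare const singleKey: KeyData;         // one key, as described by the KeyData interface
declare const severalKeys: KeyData[];     // several keys

async function proofs(): Promise<void> {
    // A single KeyData yields a single proof graph...
    const one: rdf.DatasetCore = await generateProofGraph(dataset, singleKey);

    // ...while an iterable of keys yields one proof graph per key.
    const many: rdf.DatasetCore[] = await generateProofGraph(dataset, severalKeys);

    console.log(one.size, many.length);
}
```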

\ No newline at end of file diff --git a/docs/functions/index.verifyEmbeddedProofGraph.html b/docs/functions/index.verifyEmbeddedProofGraph.html new file mode 100644 index 0000000..b6b20ab --- /dev/null +++ b/docs/functions/index.verifyEmbeddedProofGraph.html @@ -0,0 +1,17 @@ +verifyEmbeddedProofGraph | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
  • Verify the dataset with embedded proof graph(s).

    +

    If the anchor is present, the proof graphs are identified by the object terms of the corresponding proof quads. Otherwise, the type relationship to DataIntegrityProof is considered. Note that, if no anchor is provided, this second choice may lead to erroneous results, because some of the embedded proof graphs may not be meant to be a proof for the full dataset. (This may be the case in "Verifiable Presentation" style datasets.)

    +

    The validity result is the conjunction of the validation results for each proof graph separately.

    +

    The following checks are also made:

    1. There should be exactly one proof value
    2. There should be exactly one verification method, which should be a separate resource containing the key (in JWK)
    3. The key's (optional) expiration and revocation dates are checked and compared to the current time, which should be "before" those dates
    4. The proof's creation date must be before the current time
    5. The proof purpose(s) must be set, and the values are either authentication or verification

    If any of those errors occur, the overall validity result is false. The error reports themselves, with some more details, are part of the verification result structure.

    +

    Parameters

    • dataset: DatasetCore<Quad, Quad>
    • Optional anchor: Quad_Subject

    Returns Promise<VerificationResult>
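
    A sketch of how the verification result might be consumed; the rdfjs-di import path and the anchor URL are assumptions, and ProblemDetail's detail field is used as documented earlier in these docs.

```typescript
import * as rdf from '@rdfjs/types';
import * as n3 from 'n3';
import { verifyEmbeddedProofGraph, VerificationResult } from 'rdfjs-di';   // assumed entry point

declare const signedDataset: rdf.DatasetCore;   // a dataset that already embeds its proof graph(s)

async function check(): Promise<boolean> {
    // The anchor is the resource the `proof` quads start from; providing it avoids picking up
    // proof graphs that are not meant to cover the full dataset.
    const anchor = n3.DataFactory.namedNode('https://example.org/doc');   // made-up URL

    const result: VerificationResult = await verifyEmbeddedProofGraph(signedDataset, anchor);

    // Each entry in `errors`/`warnings` is a ProblemDetail with a human readable `detail` field.
    for (const problem of [...result.errors, ...result.warnings]) {
        console.warn(problem.detail);
    }
    return result.verified;
}
```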

\ No newline at end of file diff --git a/docs/functions/index.verifyProofGraph.html b/docs/functions/index.verifyProofGraph.html new file mode 100644 index 0000000..baa5a05 --- /dev/null +++ b/docs/functions/index.verifyProofGraph.html @@ -0,0 +1,13 @@ +verifyProofGraph | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
  • Verify the separate proof graph.

    +

    The validity result is the conjunction of the validation results for each proof graph separately.

    +

    The following checks are made:

    1. There should be exactly one proof value
    2. There should be exactly one verification method, which should be a separate resource containing the key (in JWK)
    3. The key's (optional) expiration and revocation dates are checked and compared to the current time, which should be "before" those dates
    4. The proof's creation date must be before the current time
    5. The proof purpose(s) must be set, and the values are either authentication or verification

    If any of those errors are found, the validation result is false. The error reports themselves, with some more details, are part of the verification result structure.

    +

    Parameters

    • dataset: DatasetCore<Quad, Quad>
    • proofGraph: DatasetCore<Quad, Quad> | DatasetCore<Quad, Quad>[]

    Returns Promise<VerificationResult>
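
    A sketch of a sign-then-verify round trip with a separate proof graph, under the same import-path assumption as the earlier examples.

```typescript
import * as rdf from '@rdfjs/types';
import { generateProofGraph, verifyProofGraph, KeyData, VerificationResult } from 'rdfjs-di';

declare const dataset: rdf.DatasetCore;   // the dataset to be signed and, later, verified
declare const keyData: KeyData;

async function roundTrip(): Promise<boolean> {
    // Sign, keeping the proof graph separate from the data...
    const proof: rdf.DatasetCore = await generateProofGraph(dataset, keyData);

    // ...and verify it later against the (unchanged) dataset.
    const result: VerificationResult = await verifyProofGraph(dataset, proof);
    if (!result.verified) console.error(result.errors);
    return result.verified;
}
```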

\ No newline at end of file diff --git a/docs/functions/lib_crypto_utils.cryptosuiteId.html b/docs/functions/lib_crypto_utils.cryptosuiteId.html new file mode 100644 index 0000000..899c933 --- /dev/null +++ b/docs/functions/lib_crypto_utils.cryptosuiteId.html @@ -0,0 +1,3 @@ +cryptosuiteId | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_crypto_utils.generateKey.html b/docs/functions/lib_crypto_utils.generateKey.html new file mode 100644 index 0000000..07b392d --- /dev/null +++ b/docs/functions/lib_crypto_utils.generateKey.html @@ -0,0 +1,4 @@ +generateKey | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_crypto_utils.sign.html b/docs/functions/lib_crypto_utils.sign.html new file mode 100644 index 0000000..e0cfdb7 --- /dev/null +++ b/docs/functions/lib_crypto_utils.sign.html @@ -0,0 +1,6 @@ +sign | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_crypto_utils.verify.html b/docs/functions/lib_crypto_utils.verify.html new file mode 100644 index 0000000..f7ed0e9 --- /dev/null +++ b/docs/functions/lib_crypto_utils.verify.html @@ -0,0 +1,4 @@ +verify | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_proof_utils.generateAProofGraph.html b/docs/functions/lib_proof_utils.generateAProofGraph.html new file mode 100644 index 0000000..c29de35 --- /dev/null +++ b/docs/functions/lib_proof_utils.generateAProofGraph.html @@ -0,0 +1,5 @@ +generateAProofGraph | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_proof_utils.rdf_prefix.html b/docs/functions/lib_proof_utils.rdf_prefix.html new file mode 100644 index 0000000..8bdfa9f --- /dev/null +++ b/docs/functions/lib_proof_utils.rdf_prefix.html @@ -0,0 +1 @@ +rdf_prefix | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_proof_utils.sec_prefix.html b/docs/functions/lib_proof_utils.sec_prefix.html new file mode 100644 index 0000000..0d00c41 --- /dev/null +++ b/docs/functions/lib_proof_utils.sec_prefix.html @@ -0,0 +1,4 @@ +sec_prefix | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_proof_utils.verifyAProofGraph.html b/docs/functions/lib_proof_utils.verifyAProofGraph.html new file mode 100644 index 0000000..8f58161 --- /dev/null +++ b/docs/functions/lib_proof_utils.verifyAProofGraph.html @@ -0,0 +1,15 @@ +verifyAProofGraph | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
  • Check one proof graph, i.e., whether the included signature corresponds to the hash value.

    +

    The following checks are also made:

    1. There should be exactly one proof value
    2. There should be exactly one verification method, which should be a separate resource containing the key (in JWK)
    3. The key's (optional) expiration and revocation dates are checked and compared to the current time, which should be "before" those dates
    4. The proof's creation date must be before the current time
    5. The proof purpose(s) must be set, and the values are either authentication or verification

    Errors are stored in the report structure. If any error occurs, the result is false.

    +

    Parameters

    • report: Errors

      placeholder for error reports

      +
    • hash: string
    • proof: Store<Quad, Quad, Quad, Quad>

      the proof graph

      +
    • Optional proofId: Quad_Graph

      Id of the proof graph, if known; used in the error reports only

      +

    Returns Promise<boolean>
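
    For illustration, this is roughly the calling pattern used by the package-level verification functions in index.ts (shown later in this patch): a single Errors report object is threaded through every proof graph check, and the internal lib/* import paths are those of this repository, not part of the public API.

```typescript
import * as n3 from 'n3';
import { Errors } from './lib/types';
import { calculateDatasetHash } from './lib/utils';
import { verifyAProofGraph } from './lib/proof_utils';

// Verify several proof graphs against one dataset, collecting all problems in one report.
async function verifyAll(dataset: n3.Store, proofGraphs: n3.Store[]): Promise<boolean> {
    const report: Errors = { errors: [], warnings: [] };
    const hash: string = await calculateDatasetHash(dataset);

    const results: boolean[] = await Promise.all(
        proofGraphs.map((proof: n3.Store) => verifyAProofGraph(report, hash, proof)),
    );

    // The overall result is false if any check failed or any error was recorded in the report.
    return report.errors.length === 0 && !results.includes(false);
}
```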

\ No newline at end of file diff --git a/docs/functions/lib_proof_utils.xsd_prefix.html b/docs/functions/lib_proof_utils.xsd_prefix.html new file mode 100644 index 0000000..9143180 --- /dev/null +++ b/docs/functions/lib_proof_utils.xsd_prefix.html @@ -0,0 +1 @@ +xsd_prefix | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_utils.arrayBufferToBase64Url.html b/docs/functions/lib_utils.arrayBufferToBase64Url.html deleted file mode 100644 index f1a4392..0000000 --- a/docs/functions/lib_utils.arrayBufferToBase64Url.html +++ /dev/null @@ -1,3 +0,0 @@ -arrayBufferToBase64Url | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_utils.base64UrlToArrayBuffer.html b/docs/functions/lib_utils.base64UrlToArrayBuffer.html deleted file mode 100644 index 8aef313..0000000 --- a/docs/functions/lib_utils.base64UrlToArrayBuffer.html +++ /dev/null @@ -1,3 +0,0 @@ -base64UrlToArrayBuffer | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_utils.calculateDatasetHash.html b/docs/functions/lib_utils.calculateDatasetHash.html index f8bb060..82ee7c1 100644 --- a/docs/functions/lib_utils.calculateDatasetHash.html +++ b/docs/functions/lib_utils.calculateDatasetHash.html @@ -1,2 +1,2 @@ calculateDatasetHash | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file +

Parameters

  • dataset: DatasetCore<Quad, Quad>

Returns Promise<string>

\ No newline at end of file diff --git a/docs/functions/lib_utils.convertToStore.html b/docs/functions/lib_utils.convertToStore.html index cd37089..5a69f8d 100644 --- a/docs/functions/lib_utils.convertToStore.html +++ b/docs/functions/lib_utils.convertToStore.html @@ -1,3 +1,3 @@ convertToStore | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
  • Convert the dataset into an n3.Store, unless it is already a store. This is done to manage the various quads more efficiently.

    -

    Parameters

    • dataset: DatasetCore<Quad, Quad>

    Returns n3.Store

\ No newline at end of file +

Parameters

  • dataset: DatasetCore<Quad, Quad>

Returns n3.Store

\ No newline at end of file diff --git a/docs/functions/lib_utils.createPrefix.html b/docs/functions/lib_utils.createPrefix.html index 99575ac..87bd54b 100644 --- a/docs/functions/lib_utils.createPrefix.html +++ b/docs/functions/lib_utils.createPrefix.html @@ -1,4 +1,4 @@ createPrefix | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
  • A simple namespace handler; I was not sure I fully understood the n3 version, and I found no reliable documentation (...)

    This function returns a function that can be used to generate a proper, full URI (as a named node) for a local name in the given namespace.

    -

    Parameters

    • uri: string

    Returns ((l) => rdf.NamedNode)

      • (l): rdf.NamedNode
      • Parameters

        • l: string

        Returns rdf.NamedNode

\ No newline at end of file +

Parameters

  • uri: string

Returns ((l) => rdf.NamedNode)

    • (l): rdf.NamedNode
    • Parameters

      • l: string

      Returns rdf.NamedNode
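
      A short sketch of the intended use, mirroring how the package itself builds its namespace helpers (see index.ts / lib/proof_utils in this patch):

```typescript
import * as rdf from '@rdfjs/types';
import { createPrefix } from './lib/utils';

// Create a helper for the security namespace, then mint terms from local names.
const sec = createPrefix('https://w3id.org/security#');

const sec_proofValue: rdf.NamedNode = sec('proofValue');
console.log(sec_proofValue.value);   // -> https://w3id.org/security#proofValue
```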

\ No newline at end of file diff --git a/docs/functions/lib_utils.isDatasetCore.html b/docs/functions/lib_utils.isDatasetCore.html index fc64751..6560701 100644 --- a/docs/functions/lib_utils.isDatasetCore.html +++ b/docs/functions/lib_utils.isDatasetCore.html @@ -1,2 +1,2 @@ isDatasetCore | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file +

Parameters

  • obj: any

Returns obj is DatasetCore<Quad, Quad>

\ No newline at end of file diff --git a/docs/functions/lib_utils.isKeyData.html b/docs/functions/lib_utils.isKeyData.html new file mode 100644 index 0000000..61cb9e6 --- /dev/null +++ b/docs/functions/lib_utils.isKeyData.html @@ -0,0 +1,2 @@ +isKeyData | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_utils.textToArrayBuffer.html b/docs/functions/lib_utils.textToArrayBuffer.html deleted file mode 100644 index c57a60a..0000000 --- a/docs/functions/lib_utils.textToArrayBuffer.html +++ /dev/null @@ -1,2 +0,0 @@ -textToArrayBuffer | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/functions/lib_utils.write_quads.html b/docs/functions/lib_utils.write_quads.html index 4b4f96a..10e94c2 100644 --- a/docs/functions/lib_utils.write_quads.html +++ b/docs/functions/lib_utils.write_quads.html @@ -1 +1 @@ -write_quads | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file +write_quads | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/hierarchy.html b/docs/hierarchy.html index d4b8c60..99b16e9 100644 --- a/docs/hierarchy.html +++ b/docs/hierarchy.html @@ -1 +1 @@ -Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file +Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/index.html b/docs/index.html index dbece79..5e05641 100644 --- a/docs/index.html +++ b/docs/index.html @@ -1,17 +1,24 @@ -Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

rdfjs-di

Data Integrity algorithms for RDF Datasets — Proof of concepts implementation

-

This is a proof-of-concept implementation (in Typescript) of the Verifiable Credentials Data Integrity (DI) specification of the W3C. The DI specification is primarily aimed at Verifiable Credentials (i.e., JSON-LD based data structures to express credentials) but the approach is such that it can be used for any kind of RDF Datasets. This implementation does that.

-

It is proof-of-concepts, because, primarily at validation time it lacks the rigorous checking of the proofs to be validating that is necessary for a security related tool. What it proves, however, that the DI specification may indeed be used to provide a proof for an RDF Dataset in the form of a separate "Proof Graph", i.e., an RDF Graph containing a signature that can be separated by a verifier.

-

The steps for signature follow the "usual" approach for signing data, namely:

+Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Data Integrity algorithms for RDF Datasets — Proof of concepts implementation

This is a proof-of-concept implementation (in Typescript) of the Verifiable Credentials Data Integrity (DI) specification of the W3C. The DI specification is primarily aimed at Verifiable Credentials (i.e., JSON-LD based data structures to express credentials) but the approach is such that it can be used for any kind of RDF Datasets. This implementation does that.

+

It is a proof-of-concept because, primarily at validation time, it doesn't do all the checks that the DI specification describes, and it has not (yet) been cross-checked with other DI implementations. What it proves, however, is that the DI specification may indeed be used to provide a proof for an RDF Dataset in the form of a separate "Proof Graph", i.e., an RDF Graph containing a signature that can be verified separately.

+

Some details

The steps for signature follow the "usual" approach for signing data, namely:

  1. The input RDF Dataset is canonicalized, using the RDF Dataset Canonicalization, as defined by the W3C.
- 2. The resulting canonical N-Quads are sorted, and hashed to yield a canonical hash of the Dataset (the W3C specification relies on SHA-256 for hashing).
- 3. The hash is signed using a secret key for ECDSA. The signature value is stored as a base64url following the Multibase format.
+ 2. The resulting canonical N-Quads are sorted, and hashed to yield a canonical hash of the Dataset (the W3C specification relies on SHA-256 for hashing by default, which is used here).
+ 3. The hash is signed using a secret key. The signature value is stored as a base64url value following the Multibase format (see the sketch after this list).
  4. A separate "proof graph" is generated that includes the signature value, some basic metadata, and the public key for the signature, stored in JWK format.
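
The hashing and signing steps can be sketched directly on top of the Web Crypto API. This is illustrative only: the 'u' multibase prefix for base64url matches the proof values in the example files, but the exact byte-level details are simplified, ECDSA is just one of the supported algorithms, and Node's Buffer is used here for the base64url encoding.

```typescript
// Illustrative only: roughly what steps 2 and 3 amount to on top of the Web Crypto API.
async function hashAndSign(canonicalNQuads: string[], privateKey: CryptoKey): Promise<string> {
    const encoder = new TextEncoder();

    // Step 2: sort the canonical N-Quads and hash the result with SHA-256.
    const sorted: string = [...canonicalNQuads].sort().join('');
    const hashBuffer: ArrayBuffer = await crypto.subtle.digest('SHA-256', encoder.encode(sorted));
    const hashHex: string = Array.from(new Uint8Array(hashBuffer))
        .map((b) => b.toString(16).padStart(2, '0'))
        .join('');

    // Step 3: sign the hash (ECDSA shown here) and encode the signature as a multibase value;
    // the 'u' prefix marks base64url, as seen in the proof values of the example files.
    const signature: ArrayBuffer = await crypto.subtle.sign(
        { name: 'ECDSA', hash: 'SHA-256' },
        privateKey,
        encoder.encode(hashHex),
    );
    return `u${Buffer.from(signature).toString('base64url')}`;
}
```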

The package has separate API entries to generate, and validate, such proof graphs. It is also possible, following the DI spec, to provide "embedded" proofs, i.e., a new dataset, containing the original data, as well as the proof graph(s), each as a separate graph within the dataset. If a separate "anchor" resource is provided, then this new dataset will also contain additional RDF triples connecting the anchor to the proof graphs.

+

The crypto layer for the package relies on the Web Crypto API specification, and its implementation in Node.js or Deno. Accordingly, the following crypto algorithms are available for this implementation: ECDSA, RSA-PSS, and RSA-SSA (i.e., RSASSA-PKCS1-v1_5).

+

Although not strictly necessary for this package, a separate method is available as part of the API to generate cryptographic keys for one of these three algorithms. Note that only ECDSA is part of the VC Working Group's specification, identified by the cryptosuite name ecdsa-2022; the other two are non-standard, and are identified with the temporary cryptosuite names rdfjs-di-rsa-pss and rdfjs-di-rsa-ssa, respectively.

+

For more details, see:

+

(Note that the API works on an RDF Data model level, and does not include a Turtle/TriG parser or serializer; that should be done separately.)

-
\ No newline at end of file +
\ No newline at end of file diff --git a/docs/interfaces/index.KeyPair.html b/docs/interfaces/index.KeyPair.html deleted file mode 100644 index e788d2b..0000000 --- a/docs/interfaces/index.KeyPair.html +++ /dev/null @@ -1,9 +0,0 @@ -KeyPair | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Crypto key pair. The keys are stored in JWK format. -At the moment, this seems the dominant format for keys in WebCrypto.

-

The values for controller, expires, and revoked, are all optional (see spec for details)

-
interface KeyPair {
    public: JsonWebKey;
    private: JsonWebKey;
    controller?: string;
    expires?: string;
    revoked?: string;
}

Properties

public: JsonWebKey
private: JsonWebKey
controller?: string
expires?: string
revoked?: string
\ No newline at end of file diff --git a/docs/interfaces/index.VerificationResult.html b/docs/interfaces/index.VerificationResult.html deleted file mode 100644 index fe0ac90..0000000 --- a/docs/interfaces/index.VerificationResult.html +++ /dev/null @@ -1,5 +0,0 @@ -VerificationResult | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
interface VerificationResult {
    verified: boolean;
    verifiedDocument: DatasetCore<Quad, Quad>;
    warnings: ProblemDetail[];
    errors: ProblemDetail[];
}

Properties

verified: boolean
verifiedDocument: DatasetCore<Quad, Quad>
warnings: ProblemDetail[]
errors: ProblemDetail[]
\ No newline at end of file diff --git a/docs/interfaces/lib_crypto_utils.KeyDetails.html b/docs/interfaces/lib_crypto_utils.KeyDetails.html new file mode 100644 index 0000000..16cc9e6 --- /dev/null +++ b/docs/interfaces/lib_crypto_utils.KeyDetails.html @@ -0,0 +1,5 @@ +KeyDetails | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/interfaces/lib_types.Errors.html b/docs/interfaces/lib_types.Errors.html new file mode 100644 index 0000000..5535ebf --- /dev/null +++ b/docs/interfaces/lib_types.Errors.html @@ -0,0 +1,3 @@ +Errors | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/interfaces/lib_types.KeyData.html b/docs/interfaces/lib_types.KeyData.html new file mode 100644 index 0000000..427bd7d --- /dev/null +++ b/docs/interfaces/lib_types.KeyData.html @@ -0,0 +1,7 @@ +KeyData | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
interface KeyData {
    controller?: string;
    expires?: string;
    revoked?: string;
    cryptosuite?: string;
    public: JsonWebKey;
    private: JsonWebKey;
}

Hierarchy (view full)

Properties

controller?: string
expires?: string
revoked?: string
cryptosuite?: string
public: JsonWebKey
private: JsonWebKey
\ No newline at end of file diff --git a/docs/interfaces/lib_types.KeyMetadata.html b/docs/interfaces/lib_types.KeyMetadata.html new file mode 100644 index 0000000..fc4bb44 --- /dev/null +++ b/docs/interfaces/lib_types.KeyMetadata.html @@ -0,0 +1,5 @@ +KeyMetadata | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
interface KeyMetadata {
    controller?: string;
    expires?: string;
    revoked?: string;
    cryptosuite?: string;
}

Hierarchy (view full)

Properties

controller?: string
expires?: string
revoked?: string
cryptosuite?: string
\ No newline at end of file diff --git a/docs/interfaces/lib_types.KeyPair.html b/docs/interfaces/lib_types.KeyPair.html new file mode 100644 index 0000000..72be998 --- /dev/null +++ b/docs/interfaces/lib_types.KeyPair.html @@ -0,0 +1,3 @@ +KeyPair | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/interfaces/lib_types.VerificationResult.html b/docs/interfaces/lib_types.VerificationResult.html new file mode 100644 index 0000000..abd827f --- /dev/null +++ b/docs/interfaces/lib_types.VerificationResult.html @@ -0,0 +1,5 @@ +VerificationResult | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
interface VerificationResult {
    verified: boolean;
    verifiedDocument: DatasetCore<Quad, Quad>;
    warnings: ProblemDetail[];
    errors: ProblemDetail[];
}

Hierarchy (view full)

Properties

verified: boolean
verifiedDocument: DatasetCore<Quad, Quad>
warnings: ProblemDetail[]
errors: ProblemDetail[]
\ No newline at end of file diff --git a/docs/interfaces/lib_utils.MapContent.html b/docs/interfaces/lib_utils.MapContent.html index e22fc1e..0499ac8 100644 --- a/docs/interfaces/lib_utils.MapContent.html +++ b/docs/interfaces/lib_utils.MapContent.html @@ -1,4 +1,4 @@ MapContent | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Structure with a separate store and its ID as a graph

-
interface MapContent {
    id: Quad_Graph;
    dataset: Store<Quad, Quad, Quad, Quad>;
}

Properties

id +
interface MapContent {
    id: Quad_Graph;
    dataset: Store<Quad, Quad, Quad, Quad>;
}

Properties

Properties

id: Quad_Graph
dataset: Store<Quad, Quad, Quad, Quad>
\ No newline at end of file +

Properties

id: Quad_Graph
dataset: Store<Quad, Quad, Quad, Quad>
\ No newline at end of file diff --git a/docs/modules/index.html b/docs/modules/index.html index 26c4d00..0af885c 100644 --- a/docs/modules/index.html +++ b/docs/modules/index.html @@ -1,6 +1,12 @@ -index | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file +index | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Externally visible API level for the package.

+

References

Re-exports KeyData
Re-exports VerificationResult
Re-exports KeyMetadata
Re-exports Cryptosuites
Re-exports generateKey
Re-exports KeyDetails
\ No newline at end of file diff --git a/docs/modules/lib_crypto_utils.html b/docs/modules/lib_crypto_utils.html new file mode 100644 index 0000000..450fdf7 --- /dev/null +++ b/docs/modules/lib_crypto_utils.html @@ -0,0 +1,17 @@ +lib/crypto_utils | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

"Internal API" to the WebCrypto facilities.

+

Put into a separate file for easier maintenance; not meant to be part of the external API. Most of these functions are not exported (via index.ts) to package users.

+

Note that, at the moment, the "interchange format" for keys is restricted to JWK. One area of improvement may be to allow for other formats (the DI standard refers to Multikey).

+

Index

Interfaces

Type Aliases

Alg, Crv, Hsh, Kty

Functions

\ No newline at end of file diff --git a/docs/modules/lib_errors.html b/docs/modules/lib_errors.html deleted file mode 100644 index 7f7aaa3..0000000 --- a/docs/modules/lib_errors.html +++ /dev/null @@ -1,6 +0,0 @@ -lib/errors | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/modules/lib_proof_utils.html b/docs/modules/lib_proof_utils.html new file mode 100644 index 0000000..f9eaade --- /dev/null +++ b/docs/modules/lib_proof_utils.html @@ -0,0 +1,24 @@ +lib/proof_utils | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

"Internal API" for handling proof graphs.

+

Put into a separate file for easier maintenance; not meant to be part of the external API. They are not exported (via index.ts) to package users.

+

Index

Variables

Functions

\ No newline at end of file diff --git a/docs/modules/lib_types.html b/docs/modules/lib_types.html new file mode 100644 index 0000000..57fe56a --- /dev/null +++ b/docs/modules/lib_types.html @@ -0,0 +1,14 @@ +lib/types | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/modules/lib_utils.html b/docs/modules/lib_utils.html index e1916dc..e2d24f2 100644 --- a/docs/modules/lib_utils.html +++ b/docs/modules/lib_utils.html @@ -1,11 +1,14 @@ -lib/utils | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0

Index

Classes

DatasetMap +lib/utils | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file +
\ No newline at end of file diff --git a/docs/types/lib_crypto_utils.Alg.html b/docs/types/lib_crypto_utils.Alg.html new file mode 100644 index 0000000..6206d88 --- /dev/null +++ b/docs/types/lib_crypto_utils.Alg.html @@ -0,0 +1,2 @@ +Alg | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/types/lib_crypto_utils.Crv.html b/docs/types/lib_crypto_utils.Crv.html new file mode 100644 index 0000000..6782977 --- /dev/null +++ b/docs/types/lib_crypto_utils.Crv.html @@ -0,0 +1,2 @@ +Crv | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/types/lib_crypto_utils.Hsh.html b/docs/types/lib_crypto_utils.Hsh.html new file mode 100644 index 0000000..e69ff03 --- /dev/null +++ b/docs/types/lib_crypto_utils.Hsh.html @@ -0,0 +1,2 @@ +Hsh | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/types/lib_crypto_utils.Kty.html b/docs/types/lib_crypto_utils.Kty.html new file mode 100644 index 0000000..3eb72ef --- /dev/null +++ b/docs/types/lib_crypto_utils.Kty.html @@ -0,0 +1,2 @@ +Kty | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.rdf_type.html b/docs/variables/lib_proof_utils.rdf_type.html new file mode 100644 index 0000000..1254acf --- /dev/null +++ b/docs/variables/lib_proof_utils.rdf_type.html @@ -0,0 +1 @@ +rdf_type | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_assertionMethod.html b/docs/variables/lib_proof_utils.sec_assertionMethod.html new file mode 100644 index 0000000..d417ce2 --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_assertionMethod.html @@ -0,0 +1 @@ +sec_assertionMethod | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_authenticationMethod.html b/docs/variables/lib_proof_utils.sec_authenticationMethod.html new file mode 100644 index 0000000..fa3533c --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_authenticationMethod.html @@ -0,0 +1 @@ +sec_authenticationMethod | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_created.html b/docs/variables/lib_proof_utils.sec_created.html new file mode 100644 index 0000000..c58b56c --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_created.html @@ -0,0 +1 @@ +sec_created | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_di_proof.html b/docs/variables/lib_proof_utils.sec_di_proof.html new file mode 100644 index 0000000..7abfd06 --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_di_proof.html @@ -0,0 +1 @@ +sec_di_proof | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_expires.html b/docs/variables/lib_proof_utils.sec_expires.html new file mode 100644 index 0000000..83a0c93 --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_expires.html @@ -0,0 +1 @@ +sec_expires | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_proof.html b/docs/variables/lib_proof_utils.sec_proof.html new file mode 100644 index 0000000..5621385 --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_proof.html @@ -0,0 +1 @@ +sec_proof | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_proofPurpose.html b/docs/variables/lib_proof_utils.sec_proofPurpose.html new file mode 100644 index 0000000..9416dbf --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_proofPurpose.html @@ -0,0 +1 @@ +sec_proofPurpose | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_proofValue.html b/docs/variables/lib_proof_utils.sec_proofValue.html new file mode 100644 index 0000000..2859615 --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_proofValue.html @@ -0,0 +1 @@ +sec_proofValue | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_publicKeyJwk.html b/docs/variables/lib_proof_utils.sec_publicKeyJwk.html new file mode 100644 index 0000000..9818c63 --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_publicKeyJwk.html @@ -0,0 +1 @@ +sec_publicKeyJwk | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_revoked.html b/docs/variables/lib_proof_utils.sec_revoked.html new file mode 100644 index 0000000..82e0639 --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_revoked.html @@ -0,0 +1 @@ +sec_revoked | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.sec_verificationMethod.html b/docs/variables/lib_proof_utils.sec_verificationMethod.html new file mode 100644 index 0000000..d0b3c0a --- /dev/null +++ b/docs/variables/lib_proof_utils.sec_verificationMethod.html @@ -0,0 +1 @@ +sec_verificationMethod | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/docs/variables/lib_proof_utils.xsd_datetime.html b/docs/variables/lib_proof_utils.xsd_datetime.html new file mode 100644 index 0000000..2e9ef83 --- /dev/null +++ b/docs/variables/lib_proof_utils.xsd_datetime.html @@ -0,0 +1 @@ +xsd_datetime | Proof-of concepts Implementation of the Dataset Integrity Specification on top of rdf-js - v1.0.0
\ No newline at end of file diff --git a/examples/small_with_proofs.ttl b/examples/small_with_proofs.ttl index a671010..906967d 100644 --- a/examples/small_with_proofs.ttl +++ b/examples/small_with_proofs.ttl @@ -46,41 +46,43 @@ _:n3-1 a doap:GitRepository; doap:location . _:b0 { - a sec:DataIntegrityProof; + a sec:DataIntegrityProof; + sec:cryptosuite "rdfjs-di-rsa-pss"; + sec:verificationMethod ; + sec:proofValue "unTRxbY69Jk5C0G3QO93sAISre0pW77Ws_vTUbXbDzDRuptW_rN4Ps5lNlsrxPRBPtJZIG05LKMCeXYJMEcMP30Lje0M2bIiRSEQcr1ucgY-PEmPYXMlOQhuuXUisUzOgaUdD8OPdajpz2JIhj_lFFXgNCiySEsFkxXzT4nnQg0Vi9b-KS8Raqp097glcT-BjGdx4DBbizVocp6XjAvYoBn3tewpLsCeuqr1Nook2bs6Uff-1veDjZgZHtwB0ImRvvYGI499lO9i7j39WxGMqgS7O1VNsAJMAozSAz2Z_Sk3uOqezJwZbDOygxE2RuZ9gMPGLS4M9W1CrkM9heYUD-w"; + sec:created "2024-03-07T11:52:00.821Z"^^xsd:dateTime; + sec:proofPurpose sec:authenticationMethod, sec:assertionMethod. + + a sec:JsonWebKey; + sec:publicKeyJwk "{\"key_ops\":[\"verify\"],\"ext\":true,\"kty\":\"RSA\",\"n\":\"zX1p6Rl0kTtbFAiISqXQbT9U6kqqIFRfualifsLA5ZNFQMDvuw3cqYUqzIyHAVYV0Bps-mXmwFVjsetQIzqhE0X2VhJ8wBS-bGxm2_E0rQ9y35mN_dWfnVhJYtONrLjkduAk04Xouws60X2ye0QHhG63j0CLNj6bqJQ_fOE_ankHxjGnZ7H7tPIeJfj9Md2GMx98BEr_8iwFXPJ5zT3_ET1zdPUGJV3r6pTOTu5H6sH5457TxCeMIIZfQ3hn3f5lz_5JL5ahtIqZ1BgaAYt5bgg63oqmn67V1fmJ08tx1LT6BbGkt1WQBv8aNgm3Z5ztLWC8MJiqgeFpyQFUOIkVCQ\",\"e\":\"AQAB\",\"alg\":\"PS256\"}"^^; + sec:controller ; + sec:expires "2050-02-24T00:00:00Z"^^xsd:dateTime +} +_:b1 { + a sec:DataIntegrityProof; sec:cryptosuite "ecdsa-2022"; - sec:created "2024-02-27T14:26:37.928Z"^^xsd:dateTime; - sec:verificationMethod ; - sec:proofValue "uyT3-XGBLlWNmFtkW0u6Hs_iqARi4VhXr2WswK6C-I2lMEs8gHxP7kCIaUnSLS6ptt5KBmI5hZXWzb-TxQ0uaMQ"; - sec:proofPurpose sec:authenticationMethod. - a sec:JsonWebKey; + sec:verificationMethod ; + sec:proofValue "uUu0Ht5CMJpKN9ChSlKSAcccKA0Ym14v3jZGXSRBSpMZ82U36AuGaWo3V31M2magZG9kgKtSLhNZRoKg-b8paXg"; + sec:created "2024-03-07T11:52:00.820Z"^^xsd:dateTime; + sec:proofPurpose sec:authenticationMethod, sec:assertionMethod; + sec:previousProof . + + a sec:JsonWebKey; sec:publicKeyJwk "{\"key_ops\":[\"verify\"],\"ext\":true,\"kty\":\"EC\",\"x\":\"LHGayjy__zWhz14u7vWyGbPWkXNdJN1AnhKiQTv3uK8\",\"y\":\"9T4ThlFhLi84d3LOaOkvrzLrr_EEczB0sIv3S3vzdd8\",\"crv\":\"P-256\"}"^^; - sec:controller ; + sec:controller ; sec:expires "2055-02-24T00:00:00Z"^^xsd:dateTime } - -_:b1 { - a sec:DataIntegrityProof; +_:b2 { + a sec:DataIntegrityProof; sec:cryptosuite "ecdsa-2022"; - sec:created "2024-02-27T14:26:37.929Z"^^xsd:dateTime; - sec:verificationMethod ; - sec:proofValue "uejeL3giQd5cdZbP-b6f5A1RVpi6k7ofP4xnUL8r4I42tiiqbsw1Y-dn70OwcLUYepQQTxC0LtsjXO0cvR0lwQA"; - sec:proofPurpose sec:authenticationMethod; - sec:previousProof . - a sec:JsonWebKey; + sec:verificationMethod ; + sec:proofValue "ui7tvde12p6kFPzzKQHV4ARYsxY4xXSpHrEC6YSecboeLtUkRKGUh0qgYWy3whj3Lfl0Wj906sLt80hy5dx7nkw"; + sec:created "2024-03-07T11:52:00.821Z"^^xsd:dateTime; + sec:proofPurpose sec:authenticationMethod, sec:assertionMethod; + sec:previousProof . 
+ + a sec:JsonWebKey; sec:publicKeyJwk "{\"key_ops\":[\"verify\"],\"ext\":true,\"kty\":\"EC\",\"x\":\"qtQA8VF1KFsvSfq1BkZP0rODQcQF8x-uL1BXgNcKWIY\",\"y\":\"smHmMXIf9FlkY0pbfrZfuCg0BMviSj8IjMhzTPJehLI\",\"crv\":\"P-256\"}"^^; sec:controller } -_:b2 { - a sec:DataIntegrityProof; - sec:cryptosuite "ecdsa-2022"; - sec:created "2024-02-27T14:26:37.929Z"^^xsd:dateTime; - sec:verificationMethod ; - sec:proofValue "uJP_kCAZKqlT4YhiKcaV6OcbfXWvrAhwZeANrspwf8LL3VrvUsmUDzfU9NUcdMalWRduXlK9l__uas7_UHR9RKQ"; - sec:proofPurpose sec:authenticationMethod; - sec:previousProof . - a sec:JsonWebKey; - sec:publicKeyJwk "{\"key_ops\":[\"verify\"],\"ext\":true,\"kty\":\"EC\",\"x\":\"Va-VUJp8w8m4BiaKVxxMaIWjZgf0zaPGvJx0oiaX-ys\",\"y\":\"rV7eBS8u4d1eyA7IcNSVoD5C83IdesNMB-gVznJe3uY\",\"crv\":\"P-256\"}"^^; - sec:controller -} - diff --git a/index.ts b/index.ts index b00b771..70addb9 100644 --- a/index.ts +++ b/index.ts @@ -1,570 +1,244 @@ +/** + * Externally visible API level for the package. + * + * + * @packageDocumentation + */ + // deno-lint-ignore-file no-inferrable-types /// import * as rdf from '@rdfjs/types'; import * as n3 from 'n3'; -import { v4 as uuid } from 'uuid'; -import * as errors from './lib/errors'; -import { ProblemDetail } from './lib/errors'; -import { - createPrefix, isDatasetCore, convertToStore, DatasetMap, MapContent, - textToArrayBuffer, calculateDatasetHash, arrayBufferToBase64Url, base64UrlToArrayBuffer, - } from './lib/utils'; - -// n3.DataFactory is a namespace with some functions... -const { namedNode, literal, quad } = n3.DataFactory; +import * as types from './lib/types'; -import { Cryptosuites, Cryptosuite, SuiteMetadata, VerificationResult } from './lib/common'; -export { Cryptosuites, Cryptosuite, SuiteMetadata, VerificationResult } from './lib/common'; +import { Errors, KeyData, VerificationResult } from './lib/types'; +import { isKeyData, isDatasetCore, convertToStore, DatasetMap, MapContent, calculateDatasetHash } from './lib/utils'; +import { generateAProofGraph, verifyAProofGraph, rdf_type, sec_di_proof, sec_proof, sec_prefix } from './lib/proof_utils'; -export { generateKey, KeyPair } from './lib/crypto_utils'; -import { KeyPair } from './lib/crypto_utils'; +/* This file is also the "top level", so a number of exports are put here to be more friendly to users */ +export { KeyData, VerificationResult, KeyMetadata, Cryptosuites } from './lib/types'; +export { generateKey, KeyDetails } from './lib/crypto_utils'; +// n3.DataFactory is a namespace with some functions... +const { quad } = n3.DataFactory; /** - * Type guard to check if an object implements the KeyPair interface. + * Generate a (separate) proof graph (or graphs), per the DI spec. The signature is stored in + * multibase format, using base64url encoding. Keys are accepted, and stored in JWK format. * - * @param obj + * @param dataset + * @param keyData + * @throws - an error if there was a key issue while signing. 
* @returns */ -function isKeyPair(obj: any): obj is KeyPair { - return (obj as KeyPair).public !== undefined && (obj as KeyPair).private !== undefined; +export async function generateProofGraph(dataset: rdf.DatasetCore, keyData: Iterable): Promise; +export async function generateProofGraph(dataset: rdf.DatasetCore, keyData: KeyData): Promise; +export async function generateProofGraph(dataset: rdf.DatasetCore, keyData: KeyData | Iterable): Promise { + // Start fresh with results + const report: Errors = { errors : [], warnings : [] } + + // This is to be signed + const toBeSigned = await calculateDatasetHash(dataset); + // prepare for the overload of arguments + const keyPairs: Iterable = isKeyData(keyData) ? [keyData] : keyData; + // execute the proof graph generation concurrently + const promises: Promise[] = Array.from(keyPairs).map((keypair: KeyData) => generateAProofGraph(report, toBeSigned, keypair)); + const retval: rdf.DatasetCore[] = await Promise.all(promises); + // return by taking care of overloading. + if (report.errors.length !== 0) { + // There were possible errors while generating the signatures + const message: string = JSON.stringify(report.errors,null,4); + throw new types.Proof_Generation_Error(`${message}`); + } else { + return isKeyData(keyData) ? retval[0] : retval; + } } -/*************************************************************************************** - * Namespaces and specific terms that are used several times - **************************************************************************************/ - -/* Various namespaces, necessary when constructing a proof graph */ -const sec_prefix = createPrefix("https://w3id.org/security#"); -const rdf_prefix = createPrefix("http://www.w3.org/1999/02/22-rdf-syntax-ns#"); -const xsd_prefix = createPrefix("http://www.w3.org/2001/XMLSchema#"); - -const rdf_type: rdf.NamedNode = rdf_prefix('type'); -const sec_proof: rdf.NamedNode = sec_prefix('proof'); -const sec_di_proof: rdf.NamedNode = sec_prefix('DataIntegrityProof'); -const sec_proofValue: rdf.NamedNode = sec_prefix('proofValue'); -const sec_publicKeyJwk: rdf.NamedNode = sec_prefix('publicKeyJwk'); -const sec_proofPurpose: rdf.NamedNode = sec_prefix('proofPurpose'); -const sec_authenticationMethod: rdf.NamedNode = sec_prefix('authenticationMethod') -const sec_assertionMethod: rdf.NamedNode = sec_prefix('assertionMethod'); -const sec_verificationMethod: rdf.NamedNode = sec_prefix('verificationMethod'); -const sec_expires: rdf.NamedNode = sec_prefix('expires'); -const sec_revoked: rdf.NamedNode = sec_prefix('revoked'); -const sec_created: rdf.NamedNode = sec_prefix('created'); -const xsd_datetime: rdf.NamedNode = xsd_prefix('dateTime'); - - - -/***************************************************************************************** - * The real meat... - *****************************************************************************************/ /** - * Subclasses are supposed to set the right algorithm, cryptosuite, etc, names. + * Verify the separate proof graph. * + * The validity result is the conjunction of the validation result for each proof graphs separately. + * + * The following checks are made: + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. 
The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) + * + * If any of those errors are found, the validation result is `false`. The error reports themselves, with some more details, are part of the verification result structure. + * + * @param dataset + * @param proofGraph + * @returns */ -abstract class DataIntegrity { - protected _algorithm: string = ''; - protected _cryptosuite: string = ''; - protected _hash: string = ''; - protected _curve: string = ''; - // The extra initialization is here to make deno happy. - // In fact, it is initialized in the constructor via - // the call to `initResults()` - protected _result: VerificationResult = { - verified: false, - verifiedDocument: null, - warnings: [], - errors: [] - }; - - constructor() { - this._hash = "SHA-256"; - this.initResults(); - } - - protected initResults() { - this._result = { - verified : false, - verifiedDocument : null, - warnings : [], - errors : [], - } - } - - - // get algorithm(): string { return this._algorithm } - // get cryptosuite(): string { return this._cryptosuite; } - // get hash(): string { return this._hash; } - // get curve(): string { return this._curve; } - - /**************************************************************************************************/ - /* Internal functions. All of them are protected, ie, usable by the concrete subclasses */ - /**************************************************************************************************/ - /** - * Import a JWK encoded key into a key usable by crypto.subtle. - * - * @param key - the key itself - * @param type - whether this is a private or public key (usable to sign or verify, respectively) - * - * @returns - */ - protected async importKey(key: JsonWebKey, type: Confidentiality): Promise { - try { - const retval = await crypto.subtle.importKey("jwk", key, - { - name: this._algorithm, - namedCurve: this._curve, - }, - true, - type === Confidentiality.public ? ["verify"] : ["sign"] - ); - if (retval === null) { - this._result.errors.push(new errors.Invalid_Verification_Method(`Invalid key: ${JSON.stringify(key,null,4)}`)); - } - return retval; - } catch(e) { - this._result.errors.push(new errors.Invalid_Verification_Method(`Invalid key: ${JSON.stringify(key)} (${e.message})`)); - return null; - } - }; - - /** - * Generate a (separate) proof graph, per the DI spec. The signature is stored in - * multibase format, using base64url encoding. - * - * @param hashValue - this is the value of the Dataset's canonical hash - * @param keyPair - * @returns - */ - protected async generateAProofGraph(hashValue: string, keyPair: KeyPair): Promise { - // Calculate the hash of the dataset, and sign the hash with the secret key - // This is the "core"... 
- const signHashValue = async (): Promise => { - const key: CryptoKey|null = await this.importKey(keyPair.private, Confidentiality.secret); - if (key === null) { - return ""; - } else { - const raw_signature: ArrayBuffer = await crypto.subtle.sign( - { - name: this._algorithm, - hash: this._hash - }, - key, - textToArrayBuffer(hashValue) - ); - return `u${arrayBufferToBase64Url(raw_signature)}`; - } - }; - - // Create a proof graph. Just a boring set of quad generations... - const createProofGraph = (proofValue: string): rdf.DatasetCore => { - const retval: n3.Store = new n3.Store(); - - // Unique URL-s, for the time being as uuid-s - const proofGraphId = `urn:uuid:${uuid()}`; - const proofGraph = namedNode(proofGraphId); - - const verificationMethodId = `urn:uuid:${uuid()}` - const keyResource = namedNode(verificationMethodId); - - retval.addQuads([ - quad( - proofGraph, rdf_type, sec_di_proof - ), - quad( - proofGraph, sec_prefix('cryptosuite'), literal(this._cryptosuite) - ), - quad( - proofGraph, sec_created, literal((new Date()).toISOString(), xsd_datetime) - ), - quad( - proofGraph, sec_verificationMethod, keyResource - ), - quad( - proofGraph, sec_proofValue, literal(proofValue) - ), - quad( - proofGraph, sec_proofPurpose, sec_authenticationMethod - ), - quad( - proofGraph, sec_proofPurpose, sec_assertionMethod - ), - - quad( - keyResource, rdf_type, sec_prefix('JsonWebKey') - ), - quad( - keyResource, sec_publicKeyJwk, literal(JSON.stringify(keyPair.public), rdf_prefix('JSON')) - ), - ]); - if (keyPair.controller) retval.add(quad(keyResource, sec_prefix('controller'), namedNode(keyPair.controller))); - if (keyPair.expires) retval.add(quad(keyResource, sec_expires, literal(keyPair.expires, xsd_datetime))); - if (keyPair.revoked) retval.add(quad(keyResource, sec_revoked, literal(keyPair.revoked, xsd_datetime))); - return retval; - }; - return createProofGraph(await signHashValue()); +export async function verifyProofGraph(dataset: rdf.DatasetCore, proofGraph: rdf.DatasetCore | rdf.DatasetCore[]): Promise { + // start fresh with the results: + const report: Errors = { errors: [], warnings: [] } + + // this is the value that must be checked... + const hash = await calculateDatasetHash(dataset); + + // just to make the handling uniform... + const proofs: rdf.DatasetCore[] = isDatasetCore(proofGraph) ? [proofGraph] : proofGraph; + + // the "convertToStore" intermediate step is necessary; the proof graph checker needs a n3.Store + const promises: Promise[] = proofs.map(convertToStore).map((pr_graph: n3.Store): Promise => verifyAProofGraph(report, hash, pr_graph)); + const results: boolean[] = await Promise.all(promises); + + + const verified = (report.errors.length > 0) ? false : !results.includes(false); + + return { + verified, + verifiedDocument: verified ? dataset : null, + errors: report.errors, + warnings: report.warnings } +} - /** - * Check one proof graph, ie, whether the included signature corresponds to the hash value. - * - * The following checks are also made and, possibly, exception are raised with errors according to - * the DI standard: - * - * 1. There should be exactly one proof value - * 2. There should be exactly one verification method, which should be a separate resource containing the key - * 3. The key's possible expiration and revocation dates are checked and compared to the current time which should be - * "before" - * 4. The proof's creation date must be before the current time - * 5. 
The proof purpose(s) must be set, and the values are either authentication or verification - * - * @param hash - * @param proof - * @returns - */ - protected async verifyAProofGraph(hash: string, proof: n3.Store, proofId?: rdf.Quad_Graph): Promise { - const localErrors : errors.ProblemDetail[] = []; - const localWarnings : errors.ProblemDetail[] = []; - - // Verify the signature by check signature of the hash with the key - // This is the "core"... - const checkHashValue = async (proof_value: string, key_jwk: JsonWebKey): Promise => { - const key: CryptoKey|null = await this.importKey(key_jwk, Confidentiality.public); - const signature_array: ArrayBuffer = base64UrlToArrayBuffer(proof_value.slice(1)); - const data: ArrayBuffer = textToArrayBuffer(hash); - if (key === null) { - return false; - } else { - const retval: boolean = await crypto.subtle.verify( - { - name: this._algorithm, - hash: this._hash - }, - key, - signature_array, - data - ); - return retval; - } - }; - - const getProofValue = (store: n3.Store): string | null => { - // Retrieve the signature value per spec: - const proof_values: rdf.Quad[] = store.getQuads(null, sec_proofValue, null, null); - if (proof_values.length === 0) { - localErrors.push(new errors.Malformed_Proof_Error("No proof value")); - return null; - } else if(proof_values.length > 1) { - localErrors.push(new errors.Malformed_Proof_Error("Several proof values")); - } - return proof_values[0].object.value; - }; - - const getPublicKey = (store: n3.Store): JsonWebKey | null => { - // first see if the verificationMethod has been set properly - const verificationMethod: rdf.Quad[] = store.getQuads(null, sec_verificationMethod, null, null); - if (verificationMethod.length === 0) { - localErrors.push(new errors.Malformed_Proof_Error("No verification method")); - return null; - } else if (verificationMethod.length > 1) { - localErrors.push(new errors.Malformed_Proof_Error("Several verification methods")); - } - - const publicKey = verificationMethod[0].object; - const keys: rdf.Quad[] = store.getQuads(publicKey, sec_publicKeyJwk, null, null); - if (keys.length === 0) { - localErrors.push(new errors.Invalid_Verification_Method(`No key values`)); - return null; - } else if (keys.length > 1) { - localErrors.push(new errors.Invalid_Verification_Method("More than one keys provided")); - } - - // Check the creation/expiration/revocation dates, if any... - const now = new Date(); - const creationDates: rdf.Quad[] = store.getQuads(null, sec_created, null, null); - for (const exp of creationDates) { - if ((new Date(exp.object.value)) > now) { - localWarnings.push(new errors.Invalid_Verification_Method(`Proof was created in the future... ${exp.object.value}`)); - } - } - - const expirationDates: rdf.Quad[] = store.getQuads(publicKey, sec_expires, null, null); - for (const exp of expirationDates) { - if ((new Date(exp.object.value)) < now) { - localErrors.push(new errors.Invalid_Verification_Method(`<${publicKey.value}> key expired on ${exp.object.value}`)); - return null; - } - } - const revocationDates: rdf.Quad[] = store.getQuads(publicKey, sec_revoked, null, null); - for (const exp of revocationDates) { - if ((new Date(exp.object.value)) < now) { - localErrors.push(new errors.Invalid_Verification_Method(`<${publicKey.value}> key was revoked on ${exp.object.value}`)); - return null; - } - } - - try { - return JSON.parse(keys[0].object.value) as JsonWebKey; - } catch(e) { - // This happens if there is a JSON parse error with the key... 
- localWarnings.push(new errors.Malformed_Proof_Error(`Parsing error for JWK: ${e.message}`)); - return null; +/** + * Create a new dataset with the copy of the original and the proof graph(s) as a separate graph(s) within the + * dataset (a.k.a. "Embedded Proof" in the DI spec terminology). + * + * If the anchor is defined, then that will be the subject for quads with the `proof` property is added (one for each proof graph). + * + * If the `keyPair` argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise, + * (e.g., if it is a Set), it is a Proof Set. + * + * @param dataset + * @param keyData + * @param anchor + * @returns + */ +export async function embedProofGraph(dataset: rdf.DatasetCore, keyData: KeyData | Iterable, anchor?: rdf.Quad_Subject): Promise { + const retval: n3.Store = convertToStore(dataset); + + const keyPairs: KeyData[] = isKeyData(keyData) ? [keyData] : Array.from(keyData); + + const proofGraphs: rdf.DatasetCore[] = await generateProofGraph(dataset, keyPairs); + + const isKeyChain: boolean = keyPairs.length > 1 && Array.isArray(keyData); + const chain: { graph: rdf.BlankNode, proofId: rdf.Quad_Subject }[] = []; + + for (let i = 0; i < proofGraphs.length; i++) { + const proofTriples = proofGraphs[i]; + const proofGraphID = retval.createBlankNode(); + for (const q of proofTriples) { + retval.add(quad(q.subject, q.predicate, q.object, proofGraphID)); + if (isKeyChain && q.predicate.value === rdf_type.value && q.object.value === sec_di_proof.value) { + // Storing the values to create the proof chains in a subsequent step + // The subject is the ID of the proof + chain.push ({ + proofId: q.subject, + graph : proofGraphID, + }); } }; - - // Check the "proofPurpose" property value - const checkProofPurposes = (store: n3.Store): void => { - const purposes: rdf.Quad[] = store.getQuads(null, sec_proofPurpose, null, null); - if (purposes.length === 0) { - throw new errors.Invalid_Verification_Method("No proof purpose set"); - } else { - const wrongPurposes: string[] = []; - for (const q of purposes) { - if (!(q.object.equals(sec_authenticationMethod) || q.object.equals(sec_assertionMethod))) { - wrongPurposes.push(`<${q.object.value}>`); - } - } - if (wrongPurposes.length > 0) { - localErrors.push(new errors.Mismatched_Proof_Purpose(`Invalid proof purpose value(s): ${wrongPurposes.join(", ")}`)); - } - } - } - - // Retrieve necessary values with checks - checkProofPurposes(proof); - const publicKey: JsonWebKey | null = getPublicKey(proof); - const proofValue: string | null = getProofValue(proof); - - // The final set of error/warning should be modified with the proof graph's ID, if applicable - if (proofId) { - localErrors.forEach((error) => { - error.detail = `${error.detail} (graph ID: <${proofId.value}>)`; - }); - localWarnings.forEach((warning) => { - warning.detail = `${warning.detail} (<${proofId.value}>)`; - }) - } - this._result.errors = [...this._result.errors, ...localErrors]; - this._result.warnings = [...this._result.warnings, ...localWarnings]; - - // Here we go with checking... - if (publicKey !== null && proofValue !== null) { - const check_results: boolean = await checkHashValue(proofValue, publicKey); - // the return value should nevertheless be false if there have been errors - return check_results ? localErrors.length === 0 : true - } else { - return false; - } - } - - - /** - * Generate a (separate) proof graph (or graphs), per the DI spec. The signature is stored in - * multibase format, using base64url encoding. 
- * - * This is just a wrapper around {@link generateAProofGraph} to take care of multiple key pairs. - * - * @param dataset - * @param keyPair - * @throws - an error if there was a key issue while signing. - * @returns - */ - async generateProofGraph(dataset: rdf.DatasetCore, keyPair: Iterable): Promise; - async generateProofGraph(dataset: rdf.DatasetCore, keyPair: KeyPair): Promise; - async generateProofGraph(dataset: rdf.DatasetCore, keyPair: KeyPair | Iterable): Promise { - // Start fresh with results - this.initResults(); - - // This is to be signed - const toBeSigned = await calculateDatasetHash(dataset); - // prepare for the overload of arguments - const keyPairs: Iterable = isKeyPair(keyPair) ? [keyPair] : keyPair; - // execute the proof graph generation concurrently - const promises: Promise[] = Array.from(keyPairs).map((keypair: KeyPair) => this.generateAProofGraph(toBeSigned, keypair)); - const retval: rdf.DatasetCore[] = await Promise.all(promises); - // return by taking care of overloading. - if (this._result.errors.length !== 0) { - // There were possible errors while generating the signatures - const message: string = JSON.stringify(this._result.errors,null,2); - throw new errors.Proof_Generation_Error(message); - } else { - return isKeyPair(keyPair) ? retval[0] : retval; + if (anchor) { + const q = quad(anchor, sec_proof, proofGraphID); + retval.add(q); } } - - /** - * Verify the separate proof graph. - * - * For now, this methods just does the minimum as a proof of concept. A more elaborate version will have - * to verify all details of the proof graph. - * - * @param dataset - * @param proofGraph - * @returns - */ - async verifyProofGraph(dataset: rdf.DatasetCore, proofGraph: rdf.DatasetCore): Promise; - async verifyProofGraph(dataset: rdf.DatasetCore, proofGraph: rdf.DatasetCore[]): Promise; - async verifyProofGraph(dataset: rdf.DatasetCore, proofGraph: rdf.DatasetCore | rdf.DatasetCore[]): Promise { - // start fresh with the results: - this.initResults(); - - // this is the value that must be checked... - const hash = await calculateDatasetHash(dataset); - - // just to make the handling uniform... - const proofs: rdf.DatasetCore[] = isDatasetCore(proofGraph) ? [proofGraph] : proofGraph; - - // the "convertToStore" intermediate step is necessary; the proof graph checker needs a n3.Store - const promises: Promise[] = proofs.map(convertToStore).map((pr_graph: n3.Store): Promise => this.verifyAProofGraph(hash, pr_graph)); - const results: boolean[] = await Promise.all(promises); - - return isDatasetCore(proofGraph) ? results[0] : results; - } - - /** - * Create a new dataset with the copy of the original and the proof graph as a separate graph within the - * dataset. - * - * The separate quad with the `proof` property is added; if the anchor is properly defined, then that - * will be the subject, otherwise a new blank node. (The latter may be meaningless, but makes it easier - * to find the proof graph for verification.) - * - * If the `keyPair` argument is an Array, then the proof graphs are considered to be a Proof Chain. Otherwise, - * (e.g., if it is a Set), it is a Proof Set. - * - * Just wrapper around {@link generateProofGraph}. - * @param dataset - * @param keyPair - * @param anchor - * @returns - */ - async embedProofGraph(dataset: rdf.DatasetCore, keyPair: KeyPair | Iterable, anchor ?: rdf.Quad_Subject): Promise { - const retval: n3.Store = convertToStore(dataset); - - const keyPairs: KeyPair[] = isKeyPair(keyPair) ? 
[keyPair] : Array.from(keyPair); - - const proofGraphs: rdf.DatasetCore[] = await this.generateProofGraph(dataset, keyPairs); - - const isKeyChain: boolean = keyPairs.length > 1 && Array.isArray(keyPair); - const chain: { graph: rdf.BlankNode, proofId: rdf.Quad_Subject }[] = []; - - for (let i = 0; i < proofGraphs.length; i++) { - const proofTriples = proofGraphs[i]; - const proofGraphID = retval.createBlankNode(); - for (const q of proofTriples) { - retval.add(quad(q.subject, q.predicate, q.object, proofGraphID)); - if (isKeyChain && q.predicate.value === rdf_type.value && q.object.value === sec_di_proof.value) { - // Storing the values to create the proof chains in a subsequent step - // The subject is the ID of the proof - chain.push ({ - proofId: q.subject, - graph : proofGraphID, - }); - } - }; - if (anchor) { - const q = quad(anchor, sec_proof, proofGraphID); - retval.add(q); - } + // Adding the chain statements, if required + if (isKeyChain) { + for (let i = 1; i < chain.length; i++) { + const q = quad(chain[i].proofId, sec_prefix("previousProof"), chain[i - 1].proofId, chain[i].graph); + retval.add(q); } - - // Adding the chain statements, if required - if (isKeyChain) { - for (let i = 1; i < chain.length; i++) { - const q = quad(chain[i].proofId, sec_prefix("previousProof"), chain[i - 1].proofId, chain[i].graph); - retval.add(q); - } - } - return retval; } + return retval; +} - /** - * Verify the dataset with embedded proof graphs. The individual proof graphs are identified by the presence - * of a type relationship to `DataIntegrityProof`; the result is the conjunction of the validation result for - * each proof graphs separately. - * - * The following checks are also made and, possibly, exception are raised with errors according to - * the DI standard: - * - * 1. There should be exactly one proof value - * 2. There should be exactly one verification method, which should be a separate resource containing the key - * 3. The key's possible expiration and revocation dates are checked and compared to the current time which should be "before" - * 4. The proof's creation date must be before the current time - * 5. The proof purpose(s) must be set, and the values are either authentication or verification - - * @param dataset - * @returns - */ - async verifyEmbeddedProofGraph(dataset: rdf.DatasetCore): Promise { - // start fresh with the results: - this.initResults(); - - const dataStore = new n3.Store(); - const proofGraphs = new DatasetMap(); - - // Separate the core data from the datasets; - // First, identify the possible dataset graph IDs - for (const q of dataset) { - // A dataset can be identified with a proof property. - if (q.predicate.equals(sec_proof)) { - // the object refers to a proof graph (unless it is a literal, which is a bug!) +/** + * Verify the dataset with embedded proof graph(s). + * + * If the anchor is present, the proof graphs are identified by the object terms of the corresponding [`proof`](https://www.w3.org/TR/vc-data-integrity/#proofs) quads. + * Otherwise, the type relationship to [`DataIntegrityProof`](https://www.w3.org/TR/vc-data-integrity/#dataintegrityproof) are considered. Note that if no anchor is provided, this second choice + * may lead to erroneous results because some of the embedded proof graphs are not meant to be a proof for the full dataset. (This may + * be the case in a ["Verifiable Presentation" style datasets](https://www.w3.org/TR/vc-data-model-2.0/#presentations-0).) 
+ * + * The validity result is the conjunction of the validation result for each proof graphs separately. + * + * The following checks are also made. + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) + * + * If any of those errors occur, the overall validity result is `false`. The error reports themselves, with some more details, are part of the verification result structure. + * + * @param dataset + * @param anchor + * @returns +*/ +export async function verifyEmbeddedProofGraph(dataset: rdf.DatasetCore, anchor?: rdf.Quad_Subject): Promise { + // start fresh with the results: + const report: Errors = { errors: [], warnings: [] } + + const dataStore = new n3.Store(); + const proofGraphs = new DatasetMap(); + + // First, identify the possible dataset graph IDs + for (const q of dataset) { + // Branching on whether there is an anchor explicitly setting the proof graphs + if (anchor) { + if (q.predicate.equals(sec_proof) && q.subject.equals(anchor)) { if (q.object.termType !== "Literal") { proofGraphs.item(q.object as rdf.Quad_Graph); - } - // The quad is not copied to the dataStore! - } else if (q.predicate.equals(rdf_type) && q.object.equals(sec_di_proof)) { - // the triple is in a proof graph! + } + } + } else { + // There is no anchor; we are looking for graphs whose type has been set + // This branch is the reason we have to use a DatasetMap for the + // storage of graph IDs; we should not have duplicate entries. + if (q.predicate.equals(rdf_type) && q.object.equals(sec_di_proof)) { proofGraphs.item(q.graph); } } + } - // By now, we got the identification of all the proof graphs, we can separate the quads among - // the data graph and the relevant proof graphs - for (const q of dataset) { - if (q.predicate.equals(sec_proof)) { - // this is an extra entry, not part of the triples that were signed - continue; - } else if(q.graph.termType === "DefaultGraph") { - dataStore.add(q) - } else if(proofGraphs.has(q.graph)) { - // this quad belongs to a proof graph! - // Note that the proof graphs contain only triples, because they are - // separate entities now... 
- proofGraphs.item(q.graph).add(quad(q.subject, q.predicate, q.object)); - } else { - // This a bona fide data quad - dataStore.add(q); - } + // By now, we got the identification of all the proof graphs, we can separate the quads among + // the data graph and the relevant proof graphs + for (const q of dataset) { + if (q.predicate.equals(sec_proof) && proofGraphs.has(q.graph)) { + // this is an extra entry, not part of the triples that were signed + // neither it is part of any proof graphs + continue; + } else if(q.graph.termType === "DefaultGraph") { + dataStore.add(q) + } else if(proofGraphs.has(q.graph)) { + // this quad belongs to a proof graph! + // Note that the separated proof graphs contain only triples, they become + // stand-alone RDF graphs now + proofGraphs.item(q.graph).add(quad(q.subject, q.predicate, q.object)); + } else { + // This a bona fide data quad, to be stored as such + dataStore.add(q); } + } - const hash = await calculateDatasetHash(dataStore); - - const proofs: MapContent[] = proofGraphs.data(); - const promises: Promise[] = proofs.map((prGraph: MapContent): Promise => this.verifyAProofGraph(hash, prGraph.dataset, prGraph.id)); - const results: boolean[] = await Promise.all(promises); + const hash = await calculateDatasetHash(dataStore); - if (this._result.errors.length > 0) { - this._result.verified = false; - } else { - this._result.verified = !results.includes(false); - } - this._result.verifiedDocument = this._result.verified ? dataStore : null; - return this._result - }; -} + const proofs: MapContent[] = proofGraphs.data(); + const promises: Promise[] = proofs.map((prGraph: MapContent): Promise => verifyAProofGraph(report, hash, prGraph.dataset, prGraph.id)); + const results: boolean[] = await Promise.all(promises); -/** - * Real instantiation of a DI cryptosuite: ecdsa-2022. - */ -export class DI_ECDSA extends DataIntegrity { - constructor() { - super(); - this._algorithm = "ECDSA"; - this._cryptosuite = "ecdsa-2022" - this._curve = "P-256" + const verified = (report.errors.length > 0) ? false : !results.includes(false); + return { + verified, + verifiedDocument: verified ? dataStore : null, + errors : report.errors, + warnings : report.warnings } } + diff --git a/lib/common.ts b/lib/common.ts deleted file mode 100644 index ddd3d12..0000000 --- a/lib/common.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Errors } from './errors'; -import { KeyPair } from './crypto_utils'; -export { KeyPair } from './crypto_utils'; -import * as rdf from '@rdfjs/types'; - -export enum Cryptosuites { - ecdsa = "ecdsa-2022", - rsa_pss = "rsa-pss-2024", - rsa_ssa = "rss-ssa-pkcs1-2024" -} - -/** Values used internally for the crypto functions; they are defined by the WebCrypto spec. */ -export enum Confidentiality { - public = "public", - secret = "secret" -} - -export interface VerificationResult extends Errors { - verified: boolean, - verifiedDocument: rdf.DatasetCore | null, -} - - -export interface SuiteMetadata { - controller?: string, - expires?: string, - revoked?: string, -} - -export interface Cryptosuite extends KeyPair, SuiteMetadata { - cryptosuite: string; - created: string; -} - diff --git a/lib/crypto_utils.ts b/lib/crypto_utils.ts index ae97c48..028bec4 100644 --- a/lib/crypto_utils.ts +++ b/lib/crypto_utils.ts @@ -1,15 +1,30 @@ -import { Errors } from "./errors"; -import * as errors from "./errors"; -import { SuiteMetadata, Cryptosuite, Cryptosuites } from './common'; +/** + * "Internal API" to the WebCrypto facilities. 
+ * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * Most of them are not exported (via `index.ts`) to + * package users. + * + * Note that, at the moment, the "interchange format" for keys is restricted to JWK. One + * area of improvement may be to allow for other formats (the DI standard refers to Multikey). + * + * @packageDocumentation + */ -export interface KeyPair { - public: JsonWebKey, - private: JsonWebKey, -} +import * as types from "./types"; +import { KeyMetadata, KeyData, Cryptosuites, KeyPair, Errors } from './types'; +/** JWK values for the algorithms that are relevant for this package */ export type Alg = "RS256" | "RS384" | "RS512" | "PS256" | "PS384" | "PS512"; + +/** JWK values for the elliptic curves that are relevant for this package */ export type Crv = "P-256" | "P-384" | "P-521"; + +/** JWK values for the hash methods that are relevant for this package */ export type Hsh = "SHA-256" | "SHA-384" | "SHA-512"; + +/** JWK values for the key types that are relevant for this package */ export type Kty = "EC" | "RSA"; interface WebCryptoAPIData { @@ -19,7 +34,8 @@ interface WebCryptoAPIData { namedCurve ?: Crv; } -export interface Cryptodata { +/** Information that may be used when generating new keys */ +export interface KeyDetails { namedCurve?: Crv, hash?: Hsh, modulusLength?: number; @@ -31,7 +47,7 @@ export interface Cryptodata { * ***********************************************************************************/ -// Default values for keys, some of them can be overwritten +/** Default values for keys, some of them can be overwritten */ const SALT_LENGTH = 32; const DEFAULT_MODUS_LENGTH = 2048; const DEFAULT_HASH = "SHA-256"; @@ -63,14 +79,14 @@ function algorithmData(report: Errors, key: JsonWebKey): WebCryptoAPIData | null return { name: "ECDSA", namedCurve: key.crv as Crv, - hash: "SHA-256" + hash: DEFAULT_HASH } } case "RSA" : { try { return RsaAlgs[key.alg as Alg]; } catch (e) { - report.errors.push(new errors.Unclassified_Error(`Key's error in 'alg': ${e.message}`)); + report.errors.push(new types.Unclassified_Error(`Key's error in 'alg': ${e.message}`)); return null; } } @@ -189,7 +205,7 @@ export async function sign(report: Errors, message: string, secretKey: JsonWebKe // Turn the the signature into Base64URL, and the into multicode return `u${arrayBufferToBase64Url(rawSignature)}`; } catch(e) { - report.errors.push(new errors.Proof_Generation_Error(e.message)); + report.errors.push(new types.Proof_Generation_Error(e.message)); return null; } } @@ -200,15 +216,16 @@ export async function sign(report: Errors, message: string, secretKey: JsonWebKe * * Possible errors are added to the report, no exceptions should be thrown. 
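+ *
+ * (Illustrative note, not part of the original sources: a typical call is
+ * `await verify(report, hashValue, proofValue, publicJwk)`, where `proofValue` is the multibase
+ * string produced by {@link sign}, i.e., base64url encoded and prefixed with `u`; the variable
+ * names here are only placeholders.)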
* - * @param report + * @param report - placeholder for error reports * @param message - * @param secretKey + * @param signature + * @param publicKey * @returns */ export async function verify(report: Errors, message: string, signature: string, publicKey: JsonWebKey): Promise { const rawMessage: ArrayBuffer = textToArrayBuffer(message); if (signature.length === 0 || signature[0] !== 'u') { - report.errors.push(new errors.Malformed_Proof_Error(`Signature is of an incorrect format (${signature})`)); + report.errors.push(new types.Malformed_Proof_Error(`Signature is of an incorrect format (${signature})`)); return false; } const rawSignature: ArrayBuffer = base64UrlToArrayBuffer(signature.slice(1)); @@ -224,77 +241,78 @@ export async function verify(report: Errors, message: string, signature: string, const retval: boolean = await crypto.subtle.verify(algorithm, key, rawSignature, rawMessage); return retval; } catch(e) { - report.errors.push(new errors.Proof_Generation_Error(e.message)); + report.errors.push(new types.Proof_Generation_Error(e.message)); return false; } } } /** - * Mapping from the JWK data to the corresponding cryptosuite identifier. + * Mapping from the JWK data to the corresponding DI cryptosuite identifier. * - * @param report + * @param report - placeholder for error reports * @param keyPair * @returns */ -export function cryptosuite(report: Errors, keyPair: KeyPair): Cryptosuites | null { +export function cryptosuiteId(report: Errors, keyPair: KeyPair): Cryptosuites | null { // Some elementary check - if (keyPair.private.kty !== keyPair.public.kty || - keyPair.private.crv !== keyPair.public.crv || + if (keyPair.private.kty !== keyPair.public.kty || + keyPair.private.crv !== keyPair.public.crv || keyPair.private.alg !== keyPair.private.alg) { - report.errors.push(new errors.Invalid_Verification_Method('Keys are not in pair')); + report.errors.push(new types.Invalid_Verification_Method('Keys are not in pair (in:\n ${JSON.stringify(keyPair,null,4)})')); return null; } const alg = algorithmData(report, keyPair.public); switch (alg.name) { - case "EC" : return Cryptosuites.ecdsa; - case "RSA-PSS" : return Cryptosuites.rsa_pss; - case "RSASSA-PKCS1-v1_5" : return Cryptosuites.rsa_ssa; - default : { - report.errors.push(new errors.Invalid_Verification_Method(`Unknown alg (${alg.name})`)); - return null ; + case "ECDSA": return Cryptosuites.ecdsa; + case "RSA-PSS": return Cryptosuites.rsa_pss; + case "RSASSA-PKCS1-v1_5": return Cryptosuites.rsa_ssa; + default: { + report.errors.push(new types.Invalid_Verification_Method(`Unknown alg (${alg.name} in:\n ${JSON.stringify(keyPair,null,4)})`)); + return null; } } } /** - * Generate key pair to be used with DI in general + * Generate key pair to be used with DI in general. This function is not necessary for the core + * functionalities of the package, but may be useful for the package users. It is therefore + * meant to be re-exported via the `index.ts` module. 
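+ *
+ * (Illustrative note, not part of the original sources: a typical call is
+ * `await generateKey(Cryptosuites.ecdsa)`; the optional `KeyDetails` argument, e.g.
+ * `{ namedCurve: "P-384" }`, overrides the built-in key generation defaults.)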
* * @param metadata * @param suite - * @param cryptodata + * @param keyData * @returns */ -export async function generateKey(suite: Cryptosuites, metadata?: SuiteMetadata, cryptodata?: Cryptodata): Promise { +export async function generateKey(suite: Cryptosuites, metadata?: KeyMetadata, keyData?: KeyDetails): Promise { const suiteToAPI = (): any => { switch(suite) { case Cryptosuites.ecdsa : return { name: "ECDSA", - namedCurve: cryptodata?.namedCurve || DEFAULT_CURVE, + namedCurve: keyData?.namedCurve || DEFAULT_CURVE, } case Cryptosuites.rsa_pss : return { name: "RSA-PSS", - modulusLength: cryptodata?.modulusLength || DEFAULT_MODUS_LENGTH, + modulusLength: keyData?.modulusLength || DEFAULT_MODUS_LENGTH, publicExponent: new Uint8Array([0x01, 0x00, 0x01]), - hash: cryptodata?.hash || DEFAULT_HASH, + hash: keyData?.hash || DEFAULT_HASH, } case Cryptosuites.rsa_ssa: return { name: 'RSASSA-PKCS1-v1_5', - modulusLength: cryptodata?.modulusLength || DEFAULT_MODUS_LENGTH, + modulusLength: keyData?.modulusLength || DEFAULT_MODUS_LENGTH, publicExponent: new Uint8Array([0x01, 0x00, 0x01]), - hash: cryptodata?.hash || DEFAULT_HASH, + hash: keyData?.hash || DEFAULT_HASH, } } } - const newPair = await crypto.subtle.generateKey(suiteToAPI(),true, ["sign", "verify"]); + const newPair = await crypto.subtle.generateKey(suiteToAPI(), true, ["sign", "verify"]); const keyPair = await toJWK(newPair); - const retval: Cryptosuite = { + const retval: KeyData = { public : keyPair.public, private : keyPair.private, cryptosuite : `${suite}`, - created : (new Date()).toISOString(), } return {...retval, ...metadata}; } diff --git a/lib/proofs.ts b/lib/proof_utils.ts similarity index 50% rename from lib/proofs.ts rename to lib/proof_utils.ts index fcf702b..c6fe40c 100644 --- a/lib/proofs.ts +++ b/lib/proof_utils.ts @@ -1,48 +1,63 @@ -import * as rdf from '@rdfjs/types'; -import * as n3 from 'n3'; +/** + * "Internal API" for handling proof graphs. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * They are not exported (via `index.ts`) to + * package users. + * + * @packageDocumentation + */ + + +import * as rdf from '@rdfjs/types'; +import * as n3 from 'n3'; import { v4 as uuid } from 'uuid'; -import * as errors from './errors'; -import { ProblemDetail, Errors } from './errors'; -import { createPrefix } from './utils'; -import { sign, verify } from './crypto_utils'; +import * as types from './types'; +import { Errors, KeyData } from './types'; +import { createPrefix } from './utils'; +import { sign, verify, cryptosuiteId } from './crypto_utils'; + // n3.DataFactory is a namespace with some functions... 
const { namedNode, literal, quad } = n3.DataFactory; -import { Cryptosuite } from './common'; /*************************************************************************************** * Namespaces and specific terms that are used several times **************************************************************************************/ /* Various namespaces, necessary when constructing a proof graph */ -const sec_prefix = createPrefix("https://w3id.org/security#"); -const rdf_prefix = createPrefix("http://www.w3.org/1999/02/22-rdf-syntax-ns#"); -const xsd_prefix = createPrefix("http://www.w3.org/2001/XMLSchema#"); - -const rdf_type: rdf.NamedNode = rdf_prefix('type'); -const sec_proof: rdf.NamedNode = sec_prefix('proof'); -const sec_di_proof: rdf.NamedNode = sec_prefix('DataIntegrityProof'); -const sec_proofValue: rdf.NamedNode = sec_prefix('proofValue'); -const sec_publicKeyJwk: rdf.NamedNode = sec_prefix('publicKeyJwk'); -const sec_proofPurpose: rdf.NamedNode = sec_prefix('proofPurpose'); -const sec_authenticationMethod: rdf.NamedNode = sec_prefix('authenticationMethod'); -const sec_assertionMethod: rdf.NamedNode = sec_prefix('assertionMethod'); -const sec_verificationMethod: rdf.NamedNode = sec_prefix('verificationMethod'); -const sec_expires: rdf.NamedNode = sec_prefix('expires'); -const sec_revoked: rdf.NamedNode = sec_prefix('revoked'); -const sec_created: rdf.NamedNode = sec_prefix('created'); -const xsd_datetime: rdf.NamedNode = xsd_prefix('dateTime'); +export const sec_prefix = createPrefix("https://w3id.org/security#"); +export const rdf_prefix = createPrefix("http://www.w3.org/1999/02/22-rdf-syntax-ns#"); +export const xsd_prefix = createPrefix("http://www.w3.org/2001/XMLSchema#"); + +export const rdf_type: rdf.NamedNode = rdf_prefix('type'); +export const sec_proof: rdf.NamedNode = sec_prefix('proof'); +export const sec_di_proof: rdf.NamedNode = sec_prefix('DataIntegrityProof'); +export const sec_proofValue: rdf.NamedNode = sec_prefix('proofValue'); +export const sec_publicKeyJwk: rdf.NamedNode = sec_prefix('publicKeyJwk'); +export const sec_proofPurpose: rdf.NamedNode = sec_prefix('proofPurpose'); +export const sec_authenticationMethod: rdf.NamedNode = sec_prefix('authenticationMethod'); +export const sec_assertionMethod: rdf.NamedNode = sec_prefix('assertionMethod'); +export const sec_verificationMethod: rdf.NamedNode = sec_prefix('verificationMethod'); +export const sec_expires: rdf.NamedNode = sec_prefix('expires'); +export const sec_revoked: rdf.NamedNode = sec_prefix('revoked'); +export const sec_created: rdf.NamedNode = sec_prefix('created'); +export const xsd_datetime: rdf.NamedNode = xsd_prefix('dateTime'); /** * Generate a (separate) proof graph, per the DI spec. The signature is stored in - * multibase format, using base64url encoding. + * [multibase format](https://www.w3.org/TR/vc-data-integrity/#multibase-0), using base64url encoding. * + * @param report - placeholder for error reports * @param hashValue - this is the value of the Dataset's canonical hash - * @param suite + * @param keyData * @returns */ -export async function generateAProofGraph(report: Errors, hashValue: string, suite: Cryptosuite): Promise < rdf.DatasetCore > { +export async function generateAProofGraph(report: Errors, hashValue: string, keyData: KeyData): Promise < rdf.DatasetCore > { + const cryptosuite = keyData?.cryptosuite || cryptosuiteId(report, keyData) + // Create a proof graph. Just a boring set of quad generations... 
const createProofGraph = (proofValue: string): rdf.DatasetCore => { const retval: n3.Store = new n3.Store(); @@ -59,7 +74,7 @@ export async function generateAProofGraph(report: Errors, hashValue: string, sui proofGraph, rdf_type, sec_di_proof ), quad( - proofGraph, sec_prefix('cryptosuite'), literal(suite.cryptosuite) + proofGraph, sec_prefix('cryptosuite'), literal(cryptosuite) ), quad( proofGraph, sec_verificationMethod, keyResource @@ -67,6 +82,9 @@ export async function generateAProofGraph(report: Errors, hashValue: string, sui quad( proofGraph, sec_proofValue, literal(proofValue) ), + quad( + proofGraph, sec_created, literal((new Date()).toISOString(), xsd_datetime) + ), quad( proofGraph, sec_proofPurpose, sec_authenticationMethod ), @@ -78,47 +96,49 @@ export async function generateAProofGraph(report: Errors, hashValue: string, sui keyResource, rdf_type, sec_prefix('JsonWebKey') ), quad( - keyResource, sec_publicKeyJwk, literal(JSON.stringify(suite.public), rdf_prefix('JSON')) + keyResource, sec_publicKeyJwk, literal(JSON.stringify(keyData.public), rdf_prefix('JSON')) ), ]); - if (suite.created) retval.add(quad(proofGraph, sec_revoked, literal(suite.created, xsd_datetime))) - if (suite.controller) retval.add(quad(keyResource, sec_prefix('controller'), namedNode(suite.controller))); - if (suite.expires) retval.add(quad(keyResource, sec_expires, literal(suite.expires, xsd_datetime))); - if (suite.revoked) retval.add(quad(keyResource, sec_revoked, literal(suite.revoked, xsd_datetime))); + if (keyData.controller) retval.add(quad(keyResource, sec_prefix('controller'), namedNode(keyData.controller))); + if (keyData.expires) retval.add(quad(keyResource, sec_expires, literal(keyData.expires, xsd_datetime))); + if (keyData.revoked) retval.add(quad(keyResource, sec_revoked, literal(keyData.revoked, xsd_datetime))); return retval; }; - return createProofGraph(await sign(report, hashValue, suite.private)); + return createProofGraph(await sign(report, hashValue, keyData.private)); }; /** * Check one proof graph, ie, whether the included signature corresponds to the hash value. * - * The following checks are also made and, possibly, exception are raised with errors according to - * the DI standard: + * The following checks are also made: + * + * 1. There should be exactly one [proof value](https://www.w3.org/TR/vc-data-integrity/#dfn-proofvalue) + * 2. There should be exactly one [verification method](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod), which should be a separate resource containing the key (in JWK) + * 3. The key's (optional) [expiration](https://www.w3.org/TR/vc-data-integrity/#defn-proof-expires) and + * [revocation](https://www.w3.org/TR/vc-data-integrity/#dfn-revoked) dates are checked and compared to the current time which should be "before" + * 4. The proof's [creation date](https://www.w3.org/TR/vc-data-integrity/#dfn-created) must be before the current time + * 5. The proof [purpose(s)](https://www.w3.org/TR/vc-data-integrity/#dfn-proofpurpose) must be set, and the values are either [authentication](https://www.w3.org/TR/vc-data-integrity/#dfn-authentication) or [verification](https://www.w3.org/TR/vc-data-integrity/#dfn-verificationmethod) * - * 1. There should be exactly one proof value - * 2. There should be exactly one verification method, which should be a separate resource containing the key - * 3. The key's possible expiration and revocation dates are checked and compared to the current time which should be - * "before" - * 4. 
The proof's creation date must be before the current time - * 5. The proof purpose(s) must be set, and the values are either authentication or verification + * Errors are stored in the `report` structure. If any error occurs, the result is false. * + * @param report - placeholder for error reports * @param hash - * @param proof + * @param proof - the proof graph + * @param proofId - Id of the proof graph, if known; used in the error reports only * @returns */ -async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, proofId ?: rdf.Quad_Graph): Promise < boolean > { - const localErrors : errors.ProblemDetail[] = []; - const localWarnings : errors.ProblemDetail[] = []; +export async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, proofId ?: rdf.Quad_Graph): Promise < boolean> { + const localErrors : types.ProblemDetail[] = []; + const localWarnings : types.ProblemDetail[] = []; const getProofValue = (store: n3.Store): string | null => { // Retrieve the signature value per spec: const proof_values: rdf.Quad[] = store.getQuads(null, sec_proofValue, null, null); if (proof_values.length === 0) { - localErrors.push(new errors.Malformed_Proof_Error("No proof value")); + localErrors.push(new types.Malformed_Proof_Error("No proof value")); return null; } else if (proof_values.length > 1) { - localErrors.push(new errors.Malformed_Proof_Error("Several proof values")); + localErrors.push(new types.Malformed_Proof_Error("Several proof values")); } return proof_values[0].object.value; }; @@ -127,19 +147,19 @@ async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, // first see if the verificationMethod has been set properly const verificationMethod: rdf.Quad[] = store.getQuads(null, sec_verificationMethod, null, null); if (verificationMethod.length === 0) { - localErrors.push(new errors.Malformed_Proof_Error("No verification method")); + localErrors.push(new types.Malformed_Proof_Error("No verification method")); return null; } else if (verificationMethod.length > 1) { - localErrors.push(new errors.Malformed_Proof_Error("Several verification methods")); + localErrors.push(new types.Malformed_Proof_Error("Several verification methods")); } const publicKey = verificationMethod[0].object; const keys: rdf.Quad[] = store.getQuads(publicKey, sec_publicKeyJwk, null, null); if (keys.length === 0) { - localErrors.push(new errors.Invalid_Verification_Method(`No key values`)); + localErrors.push(new types.Invalid_Verification_Method(`No key values`)); return null; } else if (keys.length > 1) { - localErrors.push(new errors.Invalid_Verification_Method("More than one keys provided")); + localErrors.push(new types.Invalid_Verification_Method("More than one keys provided")); } // Check the creation/expiration/revocation dates, if any... @@ -147,21 +167,21 @@ async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, const creationDates: rdf.Quad[] = store.getQuads(null, sec_created, null, null); for (const exp of creationDates) { if ((new Date(exp.object.value)) > now) { - localWarnings.push(new errors.Invalid_Verification_Method(`Proof was created in the future... ${exp.object.value}`)); + localWarnings.push(new types.Invalid_Verification_Method(`Proof was created in the future... 
${exp.object.value}`)); } } const expirationDates: rdf.Quad[] = store.getQuads(publicKey, sec_expires, null, null); for (const exp of expirationDates) { if ((new Date(exp.object.value)) < now) { - localErrors.push(new errors.Invalid_Verification_Method(`<${publicKey.value}> key expired on ${exp.object.value}`)); + localErrors.push(new types.Invalid_Verification_Method(`<${publicKey.value}> key expired on ${exp.object.value}`)); return null; } } const revocationDates: rdf.Quad[] = store.getQuads(publicKey, sec_revoked, null, null); for (const exp of revocationDates) { if ((new Date(exp.object.value)) < now) { - localErrors.push(new errors.Invalid_Verification_Method(`<${publicKey.value}> key was revoked on ${exp.object.value}`)); + localErrors.push(new types.Invalid_Verification_Method(`<${publicKey.value}> key was revoked on ${exp.object.value}`)); return null; } } @@ -170,7 +190,7 @@ async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, return JSON.parse(keys[0].object.value) as JsonWebKey; } catch (e) { // This happens if there is a JSON parse error with the key... - localWarnings.push(new errors.Malformed_Proof_Error(`Parsing error for JWK: ${e.message}`)); + localWarnings.push(new types.Malformed_Proof_Error(`Parsing error for JWK: ${e.message}`)); return null; } }; @@ -179,7 +199,7 @@ async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, const checkProofPurposes = (store: n3.Store): void => { const purposes: rdf.Quad[] = store.getQuads(null, sec_proofPurpose, null, null); if (purposes.length === 0) { - throw new errors.Invalid_Verification_Method("No proof purpose set"); + localErrors.push(new types.Invalid_Verification_Method("No proof purpose set")) } else { const wrongPurposes: string[] = []; for (const q of purposes) { @@ -188,7 +208,7 @@ async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, } } if (wrongPurposes.length > 0) { - localErrors.push(new errors.Mismatched_Proof_Purpose(`Invalid proof purpose value(s): ${wrongPurposes.join(", ")}`)); + localErrors.push(new types.Mismatched_Proof_Purpose(`Invalid proof purpose value(s): ${wrongPurposes.join(", ")}`)); } } } @@ -204,7 +224,7 @@ async function verifyAProofGraph(report: Errors, hash: string, proof: n3.Store, error.detail = `${error.detail} (graph ID: <${proofId.value}>)`; }); localWarnings.forEach((warning) => { - warning.detail = `${warning.detail} (<${proofId.value}>)`; + warning.detail = `${warning.detail} (graph ID: <${proofId.value}>)`; }); } report.errors = [...report.errors, ...localErrors]; diff --git a/lib/errors.ts b/lib/types.ts similarity index 60% rename from lib/errors.ts rename to lib/types.ts index 0bd7571..cb6ec0a 100644 --- a/lib/errors.ts +++ b/lib/types.ts @@ -1,3 +1,36 @@ +/** + * Common types and classes. 
+ * + * @packageDocumentation + */ + +import * as rdf from '@rdfjs/types'; + +export enum Cryptosuites { + ecdsa = "ecdsa-2022", + rsa_pss = "rdfjs-di-rsa-pss", + rsa_ssa = "rdfjs-di-rss-ssa" +} + +export interface VerificationResult extends Errors { + verified: boolean, + verifiedDocument: rdf.DatasetCore | null, +} + +export interface KeyPair { + public: JsonWebKey, + private: JsonWebKey, +} + +export interface KeyMetadata { + controller?: string, + expires?: string, + revoked?: string, + cryptosuite?: string, +} + +export interface KeyData extends KeyMetadata, KeyPair {} + /***************************************************************************************** * Errors *****************************************************************************************/ @@ -5,7 +38,7 @@ /** * Superclass for the various error conditions. The entries are based on the DI specification. */ -export abstract class ProblemDetail { +export abstract class ProblemDetail extends Error { /** The vocabulary URL for the entry */ type: string; /** The error code */ @@ -16,22 +49,22 @@ export abstract class ProblemDetail { detail: string; constructor(detail: string, title: string, code: number) { - // super(detail); + super(detail); this.detail = detail; - this.title = title; - this.code = code; - this.type = `https://w3id.org/security#${title.replace(' ', '_').toUpperCase()}`; + this.title = title; + this.code = code; + this.type = `https://w3id.org/security#${title.replace(' ', '_').toUpperCase()}`; } } export interface Errors { warnings: ProblemDetail[]; - errors: ProblemDetail[]; -} + errors: ProblemDetail[]; +} export class Proof_Generation_Error extends ProblemDetail { constructor(detail: string) { - super(detail,'Proof generation error', -16); + super(detail, 'Proof generation error', -16); } } @@ -58,5 +91,3 @@ export class Unclassified_Error extends ProblemDetail { super(detail, 'Unclassified error', -100); } } - - diff --git a/lib/utils.ts b/lib/utils.ts index 3044bf4..68bd12c 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -1,12 +1,19 @@ /** - * Collection of smaller utilities needed for the DI implementation. Put into a separate file for an easier maintenance; not meant - * to be part of the external API + * Collection of smaller utilities needed for the DI implementation. + * + * Put into a separate file for an easier maintenance; not meant + * to be part of the external API. + * They are not exported (via `index.ts`) to + * package users. + * + * @packageDocumentation + * */ import { RDFC10 } from 'rdfjs-c14n'; import * as rdf from '@rdfjs/types'; import * as n3 from 'n3'; -import { KeyPair } from './common'; +import { KeyPair, KeyMetadata } from './types'; const { namedNode } = n3.DataFactory; /*************************************************************************************** @@ -109,20 +116,9 @@ export class DatasetMap { } -// /***************************************************************************************** -// * Misc Utility Functions -// *****************************************************************************************/ -// /* -// These two came from perplexity, hopefully it is correct... 
-// */ - -// const base64ToUrl = (base64String: string): string => { -// return base64String.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, ''); -// }; - -// const urlToBase64 = (base64Url: string): string => { -// return base64Url.replace(/-/g, '+').replace(/_/g, '/'); -// }; +/***************************************************************************************** + * Misc Utility Functions + *****************************************************************************************/ /** * Type guard to check if an object implements the rdf.DatasetCore interface. @@ -144,19 +140,10 @@ export function isDatasetCore(obj: any): obj is rdf.DatasetCore { * @param obj * @returns */ -export function isKeyPair(obj: any): obj is KeyPair { +export function isKeyData(obj: any): obj is KeyMetadata { return (obj as KeyPair).public !== undefined && (obj as KeyPair).private !== undefined; } - -// /** -// * Text to array buffer, needed for crypto operations -// * @param text -// */ -// export function textToArrayBuffer(text: string): ArrayBuffer { -// return (new TextEncoder()).encode(text).buffer; -// } - /** * Calculate the canonical hash of a dataset using the implementation of RDFC 1.0. * @@ -170,48 +157,6 @@ export async function calculateDatasetHash(dataset: rdf.DatasetCore): Promise { +export async function get_keys(): Promise { const raw_keys: string = await fs.readFile('testing/keys.json', 'utf-8'); return JSON.parse(raw_keys); } diff --git a/testing/run/main.ts b/testing/run/main.ts index 8fa105f..e3e99ac 100644 --- a/testing/run/main.ts +++ b/testing/run/main.ts @@ -3,9 +3,11 @@ import { Command } from 'commander'; import * as process from 'node:process'; -import { KeyPair, DI_ECDSA, VerificationResult } from '../../index'; -import { get_quads, DataFactory, write_quads } from './rdfn3'; -import { get_keys, OSet } from './keys'; +import { KeyData, VerificationResult, + generateProofGraph, verifyProofGraph, + embedProofGraph, verifyEmbeddedProofGraph } from '../../index'; +import { get_quads, DataFactory, write_quads } from './rdfn3'; +import { get_keys, OSet } from './keys'; function displayVerificationResult(result: VerificationResult): void { console.log(`>>>> Verification result`); @@ -56,36 +58,48 @@ async function main() { const input = (program.args.length === 0) ? 'small.ttl' : program.args[0]; const anchor = options.anchor ? DataFactory.namedNode(`file:///${input}`) : undefined ; - const keyPairs: KeyPair[] = await get_keys(); - // const keyPair = await generateKeys(); + const keyPairs: KeyData[] = await get_keys(); const dataset = await get_quads(input); - const di_ecdsa = new DI_ECDSA(); - if (embed) { - const finalKeys = (proof_set) ? new OSet(keyPairs) : ((proof_chain) ? keyPairs: keyPairs[0]); - if (!quiet) console.log(`>>> Generating embedded proofs for "${input}", with anchor at "${JSON.stringify(anchor)}"\n`); - const proof = await di_ecdsa.embedProofGraph(dataset, finalKeys, anchor); - if (!no_output) write_quads(proof); - if (verify) { - const result = await di_ecdsa.verifyEmbeddedProofGraph(proof); - if (!quiet) displayVerificationResult(result); - } else { - console.log(`>>> No verification was required`) - } - } else { - let result: boolean[]; - if (proof_set || proof_chain) { - const proofs: rdf.DatasetCore[] = await di_ecdsa.generateProofGraph(dataset, keyPairs); - result = (verify) ? 
await di_ecdsa.verifyProofGraph(dataset, proofs) : [false]; - if (!no_output) for (const proof of proofs) write_quads(proof); - } else { - const keyPair: KeyPair = keyPairs[0]; - const proof: rdf.DatasetCore = await di_ecdsa.generateProofGraph(dataset, keyPair); - result = (verify) ? [await di_ecdsa.verifyProofGraph(dataset, proof)] : [false]; + try { + if (embed) { + const finalKeys = (proof_set) ? new OSet(keyPairs) : ((proof_chain) ? keyPairs: keyPairs[0]); + if (!quiet) console.log(`>>> Generating embedded proofs for "${input}", with anchor at "${JSON.stringify(anchor)}"\n`); + const proof = await embedProofGraph(dataset, finalKeys, anchor); if (!no_output) write_quads(proof); + if (verify) { + const result = await verifyEmbeddedProofGraph(proof, anchor); + if (!quiet) displayVerificationResult(result); + } else { + console.log(`>>> No verification was required`) + } + } else { + let result: boolean[]; + if (proof_set || proof_chain) { + const proofs: rdf.DatasetCore[] = await generateProofGraph(dataset, keyPairs); + if (verify) { + const result = await verifyProofGraph(dataset, proofs); + if (!quiet) displayVerificationResult(result); + } else { + console.log(`>>> No verification was required`); + } + if (!no_output) for (const proof of proofs) write_quads(proof); + } else { + // Simplest alternative: single key, single output proof + const keyPair: KeyData = keyPairs[0]; + const proof: rdf.DatasetCore = await generateProofGraph(dataset, keyPair); + if (verify) { + const result = await verifyProofGraph(dataset, proof); + if (!quiet) displayVerificationResult(result); + } else { + console.log(`>>> No verification was required`); + } + if (!no_output) write_quads(proof); + } } - if (!quiet) console.log(verify ? `>>> Verification result: ${result}` : `>>> No verification was required`); + } catch(e) { + console.log(`${e.message}`); } } diff --git a/testing/tests/small.ttl b/testing/tests/small.ttl index 745ddcb..cafb829 100644 --- a/testing/tests/small.ttl +++ b/testing/tests/small.ttl @@ -5,6 +5,7 @@ @prefix doap: . @prefix earl: . @prefix xsd: . +@prefix sec: . foaf:primaryTopic ; dc:issued "2024-02-14T13:21:08.700Z"^^xsd:dateTime ; @@ -43,3 +44,4 @@ foaf:name "Ivan Herman"; foaf:title "Implementor"; foaf:homepage . +
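
Editor's addendum (illustrative only, not part of the patch): a minimal sketch of how the new functional API introduced above might be used end to end. It assumes that `Cryptosuites` and `generateKey` are re-exported through `index.ts` alongside `embedProofGraph` and `verifyEmbeddedProofGraph` (as the comments in `lib/crypto_utils.ts` intend); the import path and the anchor IRI are placeholders.

import * as rdf from '@rdfjs/types';
import * as n3 from 'n3';
import {
    KeyData, VerificationResult, Cryptosuites,
    generateKey, embedProofGraph, verifyEmbeddedProofGraph
} from './index';                     // adjust to the package's actual entry point

async function signAndCheck(dataset: rdf.DatasetCore): Promise<boolean> {
    // Fresh ECDSA key pair in JWK form, using the defaults of lib/crypto_utils.ts
    const keyData: KeyData = await generateKey(Cryptosuites.ecdsa);

    // Anchor the `proof` quads on a known subject so verification can locate the proof graph
    const anchor = n3.DataFactory.namedNode('urn:example:dataset');

    // Copy of the dataset with one embedded proof graph
    const signed: rdf.DatasetCore = await embedProofGraph(dataset, keyData, anchor);

    // Verify the embedded proof; problems are reported in the result structure, not thrown
    const result: VerificationResult = await verifyEmbeddedProofGraph(signed, anchor);
    result.errors.forEach((e) => console.error(e.detail));
    return result.verified;
}

Passing an Array of `KeyData` values instead of a single key would produce a Proof Chain, while any other iterable (e.g., a Set) yields a Proof Set, as described in the `embedProofGraph` comment above.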