diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 9639f9b5..d66c5c07 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -9,7 +9,7 @@ updates:
- package-ecosystem: "cargo"
directory: "/homestar-core"
commit-message:
- prefix: "chore"
+ prefix: "[chore(core)]"
include: "scope"
target-branch: "main"
schedule:
@@ -18,7 +18,7 @@ updates:
- package-ecosystem: "cargo"
directory: "/homestar-runtime"
commit-message:
- prefix: "chore"
+ prefix: "[chore(runtime)]"
include: "scope"
target-branch: "main"
schedule:
@@ -27,7 +27,7 @@ updates:
- package-ecosystem: "cargo"
directory: "/homestar-guest-wasm"
commit-message:
- prefix: "chore"
+ prefix: "[chore(guest-wasm)]"
include: "scope"
target-branch: "main"
schedule:
@@ -36,7 +36,7 @@ updates:
- package-ecosystem: "cargo"
directory: "/homestar-wasm"
commit-message:
- prefix: "chore"
+ prefix: "[chore(wasm)]"
include: "scope"
target-branch: "main"
schedule:
@@ -45,7 +45,7 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
commit-message:
- prefix: "chore(ci)"
+ prefix: "[chore(ci)]"
include: "scope"
target-branch: "main"
schedule:
diff --git a/.ignore b/.ignore
new file mode 100644
index 00000000..ce6103f8
--- /dev/null
+++ b/.ignore
@@ -0,0 +1,16 @@
+# cargo-watch ignores
+
+docker
+flake.lock
+release-please-config.json
+deny.toml
+diesel.toml
+LICENSE
+*.nix
+*.md
+
+.envrc
+.dockerignore
+.gitignore
+.release-please-manifest.json
+.pre-commit-config.yaml
diff --git a/Cargo.lock b/Cargo.lock
index 7cf330ad..012f5ac9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2580,6 +2580,7 @@ dependencies = [
"enum-assoc",
"generic-array",
"indexmap",
+ "json",
"libipld",
"libsqlite3-sys",
"proptest",
diff --git a/Cargo.toml b/Cargo.toml
index e4e3c753..19423c00 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -18,6 +18,7 @@ rust-version = "1.66.0"
[workspace.dependencies]
anyhow = { version = "1.0", features = ["backtrace"] }
+thiserror = "1.0"
tokio = { version = "1.26", features = ["fs", "io-util", "io-std", "macros", "rt", "rt-multi-thread"] }
tracing = "0.1"
diff --git a/README.md b/README.md
index 42728c81..2a4f1982 100644
--- a/README.md
+++ b/README.md
@@ -12,20 +12,32 @@
+
+
+
-
-
+
+
+
+
+
+
+
+
-
+
-
+
+
+
+
@@ -69,14 +81,14 @@ represents the `homestar` runtime.
- Running the tests:
-We recommend using [cargo nextest][cargo-nextest], which is installed via
-[our Nix flake](#nix) or can be [installed separately][cargo-nextest-install].
+We recommend using [cargo nextest][cargo-nextest], which is installed by default
+in our [Nix flake](#nix) or can be [installed separately][cargo-nextest-install].
```console
cargo nextest run --all-features --no-capture
```
-Otherwise, the above command looks like this using the default `cargo test`:
+The above command translates to this using the default `cargo test`:
```console
cargo test --all-features -- --nocapture
@@ -105,7 +117,7 @@ with `experimental` and `buildkit` set to `true`, for example:
- Build a multi-plaform Docker image via [buildx][buildx]:
```console
- docker buildx build --platform=linux/amd64,linux/arm64 -t homestar-runtime --progress=plain .
+ docker buildx build --file docker/Dockerfile --platform=linux/amd64,linux/arm64 -t homestar-runtime --progress=plain .
```
- Run a Docker image (depending on your platform):
@@ -149,8 +161,27 @@ hooks. Please run this before every commit and/or push.
- We recommend leveraging [cargo-watch][cargo-watch],
[cargo-expand][cargo-expand] and [irust][irust] for Rust development.
-- We also recommend using [cargo-udeps][cargo-udeps] for removing unused dependencies
- before commits and pull-requests.
+- We also recommend using [cargo-udeps][cargo-udeps] for removing unused
+ dependencies before commits and pull-requests.
+- If using our [Nix flake][nix-flake], there are a number of handy
+ command shortcuts available for working with `cargo-watch`, `diesel`, and
+ other binaries, including:
+ * `ci`, which runs a sequence of commands to check formatting, lints, release
+ builds, and tests
+ * `db` and `db-reset` for running `diesel` setup and migrations
+ * `compile-wasm` for compiling [homestar-guest-wasm](./homestar-guest-wasm),
+ a [wit-bindgen][]-driven example, to the `wasm32-unknown-unknown` target
+ * `docker-<amd64|arm64>` for running docker builds
+ * `nx-test`, which translates to `cargo nextest run && cargo test --doc`
+ * `x-test` for testing continuously as files change, translating to
+ `cargo watch -c -s "cargo nextest run && cargo test --doc"`
+ * `x-<build|check|run|clippy>` for running a variety of `cargo watch`
+ execution stages
+ * `nx-test-<all|0>`, which is just like `nx-test`, but adds `all` or `0`
+ for running tests either with the `all-features` flag or
+ `no-default-features` flag, respectively.
+ * `x-<cmd>-<crate>` for package-specific
+ builds, tests, etc.
### Conventional Commits
@@ -221,3 +252,4 @@ conditions.
[pre-commit]: https://pre-commit.com/
[seamless-services]: https://youtu.be/Kr3B3sXh_VA
[ucan-invocation]: https://github.com/ucan-wg/invocation
+[wit-bindgen]: https://github.com/bytecodealliance/wit-bindgen
diff --git a/flake.lock b/flake.lock
index 9900418e..5a7c0391 100644
--- a/flake.lock
+++ b/flake.lock
@@ -1,12 +1,15 @@
{
"nodes": {
"flake-utils": {
+ "inputs": {
+ "systems": "systems"
+ },
"locked": {
- "lastModified": 1667395993,
- "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
+ "lastModified": 1685518550,
+ "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
"owner": "numtide",
"repo": "flake-utils",
- "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
+ "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
"type": "github"
},
"original": {
@@ -17,16 +20,16 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1678872516,
- "narHash": "sha256-/E1YwtMtFAu2KUQKV/1+KFuReYPANM2Rzehk84VxVoc=",
+ "lastModified": 1686331006,
+ "narHash": "sha256-hElRDWUNG655aqF0awu+h5cmDN+I/dQcChRt2tGuGGU=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "9b8e5abb18324c7fe9f07cb100c3cd4a29cda8b8",
+ "rev": "85bcb95aa83be667e562e781e9d186c57a07d757",
"type": "github"
},
"original": {
"id": "nixpkgs",
- "ref": "nixos-22.11",
+ "ref": "nixos-23.05",
"type": "indirect"
}
},
@@ -47,11 +50,11 @@
]
},
"locked": {
- "lastModified": 1674095406,
- "narHash": "sha256-RexH/1rZTiX4OhdYkuJP3MuANJ+JRgoLKL60iHm//T0=",
+ "lastModified": 1686537156,
+ "narHash": "sha256-mJD80brS6h6P4jzwdKID0S9RvfyiruxgJbXvPPIDqF0=",
"owner": "oxalica",
"repo": "rust-overlay",
- "rev": "5f7315b9800e2e500e6834767a57e39f7dbfd495",
+ "rev": "e75da5cfc7da874401decaa88f4ccb3b4d64d20d",
"type": "github"
},
"original": {
@@ -59,6 +62,21 @@
"repo": "rust-overlay",
"type": "github"
}
+ },
+ "systems": {
+ "locked": {
+ "lastModified": 1681028828,
+ "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+ "owner": "nix-systems",
+ "repo": "default",
+ "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-systems",
+ "repo": "default",
+ "type": "github"
+ }
}
},
"root": "root",
diff --git a/flake.nix b/flake.nix
index 339f1a8c..ea8a6bf9 100644
--- a/flake.nix
+++ b/flake.nix
@@ -2,9 +2,8 @@
description = "homestar";
inputs = {
- nixpkgs.url = "nixpkgs/nixos-22.11";
+ nixpkgs.url = "nixpkgs/nixos-23.05";
flake-utils.url = "github:numtide/flake-utils";
-
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
@@ -45,6 +44,109 @@
cargo-watch
twiggy
];
+
+ ci = pkgs.writeScriptBin "ci" ''
+ cargo fmt --check
+ cargo clippy
+ cargo build --release
+ nx-test
+ '';
+
+ db = pkgs.writeScriptBin "db" ''
+ diesel setup
+ diesel migration run
+ '';
+
+ dbReset = pkgs.writeScriptBin "db-reset" ''
+ diesel database reset
+ diesel setup
+ diesel migration run
+ '';
+
+ compileWasm = pkgs.writeScriptBin "compile-wasm" ''
+ cargo build -p homestar-guest-wasm --target wasm32-unknown-unknown --release
+ '';
+
+ dockerBuild = arch:
+ pkgs.writeScriptBin "docker-${arch}" ''
+ docker buildx build --file docker/Dockerfile --platform=linux/${arch} -t homestar-runtime --progress=plain .
+ '';
+
+ xFunc = cmd:
+ pkgs.writeScriptBin "x-${cmd}" ''
+ cargo watch -c -x ${cmd}
+ '';
+
+ xFuncAll = cmd:
+ pkgs.writeScriptBin "x-${cmd}-all" ''
+ cargo watch -c -s "cargo ${cmd} --all-features"
+ '';
+
+ xFuncNoDefault = cmd:
+ pkgs.writeScriptBin "x-${cmd}-0" ''
+ cargo watch -c -s "cargo ${cmd} --no-default-features"
+ '';
+
+ xFuncPackage = cmd: crate:
+ pkgs.writeScriptBin "x-${cmd}-${crate}" ''
+ cargo watch -c -s "cargo ${cmd} -p homestar-${crate} --all-features"
+ '';
+
+ xFuncTest = pkgs.writeScriptBin "x-test" ''
+ cargo watch -c -s "cargo nextest run && cargo test --doc"
+ '';
+
+ xFuncTestAll = pkgs.writeScriptBin "x-test-all" ''
+ cargo watch -c -s "cargo nextest run --all-features --no-capture \
+ && cargo test --doc --all-features"
+ '';
+
+ xFuncTestNoDefault = pkgs.writeScriptBin "x-test-0" ''
+ cargo watch -c -s "cargo nextest run --no-default-features --no-capture \
+ && cargo test --doc --no-default-features"
+ '';
+
+ xFuncTestPackage = crate:
+ pkgs.writeScriptBin "x-test-${crate}" ''
+ cargo watch -c -s "cargo nextest run -p homestar-${crate} --all-features \
+ && cargo test --doc -p homestar-${crate} --all-features"
+ '';
+
+ nxTest = pkgs.writeScriptBin "nx-test" ''
+ cargo nextest run
+ cargo test --doc
+ '';
+
+ nxTestAll = pkgs.writeScriptBin "nx-test-all" ''
+ cargo nextest run --all-features --no-capture
+ cargo test --doc --all-features
+ '';
+
+ nxTestNoDefault = pkgs.writeScriptBin "nx-test-0" ''
+ cargo nextest run --no-default-features --no-capture
+ cargo test --doc --no-default-features
+ '';
+
+ scripts = [
+ ci
+ db
+ dbReset
+ compileWasm
+ (builtins.map (arch: dockerBuild arch) ["amd64" "arm64"])
+ (builtins.map (cmd: xFunc cmd) ["build" "check" "run" "clippy"])
+ (builtins.map (cmd: xFuncAll cmd) ["build" "check" "run" "clippy"])
+ (builtins.map (cmd: xFuncNoDefault cmd) ["build" "check" "run" "clippy"])
+ (builtins.map (cmd: xFuncPackage cmd "core") ["build" "check" "run" "clippy"])
+ (builtins.map (cmd: xFuncPackage cmd "wasm") ["build" "check" "run" "clippy"])
+ (builtins.map (cmd: xFuncPackage cmd "runtime") ["build" "check" "run" "clippy"])
+ xFuncTest
+ xFuncTestAll
+ xFuncTestNoDefault
+ (builtins.map (crate: xFuncTestPackage crate) ["core" "wasm" "guest-wasm" "runtime"])
+ nxTest
+ nxTestAll
+ nxTestNoDefault
+ ];
in rec
{
devShells.default = pkgs.mkShell {
@@ -57,16 +159,17 @@
nightly-rustfmt
rust-toolchain
rust-analyzer
+ rustup
pkg-config
pre-commit
protobuf
- rustup
diesel-cli
direnv
self.packages.${system}.irust
]
++ format-pkgs
++ cargo-installs
+ ++ scripts
++ lib.optionals stdenv.isDarwin [
darwin.apple_sdk.frameworks.Security
darwin.apple_sdk.frameworks.CoreFoundation
@@ -92,6 +195,8 @@
doCheck = false;
cargoSha256 = "sha256-FmsD3ajMqpPrTkXCX2anC+cmm0a2xuP+3FHqzj56Ma4=";
};
+
+ formatter = pkgs.alejandra;
}
);
}
diff --git a/homestar-core/Cargo.toml b/homestar-core/Cargo.toml
index be5819be..6eee8442 100644
--- a/homestar-core/Cargo.toml
+++ b/homestar-core/Cargo.toml
@@ -32,7 +32,7 @@ libsqlite3-sys = { version = "0.26", features = ["bundled"] }
proptest = { version = "1.1", optional = true }
serde = { version = "1.0", features = ["derive"] }
signature = "2.0"
-thiserror = "1.0"
+thiserror = { workspace = true }
tracing = { workspace = true }
ucan = "0.1"
url = "2.3"
@@ -40,6 +40,7 @@ xid = "1.0"
[dev-dependencies]
criterion = "0.4"
+json = "0.12"
[features]
default = []
diff --git a/homestar-core/src/consts.rs b/homestar-core/src/consts.rs
index df94e5b3..ad5c9194 100644
--- a/homestar-core/src/consts.rs
+++ b/homestar-core/src/consts.rs
@@ -1,4 +1,6 @@
//! Exported global constants.
-/// SemVer-formatted version of the UCAN Invocation Specification.
+/// SemVer-formatted version of the UCAN Invocation Specification.
pub const INVOCATION_VERSION: &str = "0.2.0";
+/// DagCbor codec.
+pub const DAG_CBOR: u64 = 0x71;
diff --git a/homestar-core/src/lib.rs b/homestar-core/src/lib.rs
index 4596d257..9fe6d56c 100644
--- a/homestar-core/src/lib.rs
+++ b/homestar-core/src/lib.rs
@@ -18,10 +18,13 @@
//! [Ucan invocation]:
pub mod consts;
+pub mod macros;
#[cfg(any(test, feature = "test_utils"))]
#[cfg_attr(docsrs, doc(cfg(feature = "test_utils")))]
pub mod test_utils;
mod unit;
+
pub mod workflow;
pub use consts::*;
pub use unit::*;
+pub use workflow::Workflow;
diff --git a/homestar-core/src/macros.rs b/homestar-core/src/macros.rs
new file mode 100644
index 00000000..645c00b2
--- /dev/null
+++ b/homestar-core/src/macros.rs
@@ -0,0 +1,68 @@
+//! Macros for cross-crate export.
+
+/// Return early with an error.
+///
+/// Modelled after [anyhow::bail].
+///
+/// # Example
+///
+/// ```
+/// use homestar_core::{workflow, bail, Unit};
+///
+/// fn has_permission(user: usize, resource: usize) -> bool {
+/// true
+/// }
+///
+/// # fn main() -> Result<(), workflow::Error> {
+/// # let user = 0;
+/// # let resource = 0;
+/// #
+///
+/// if !has_permission(user, resource) {
+/// bail!(workflow::Error::UnknownError);
+/// }
+///
+/// # Ok(())
+/// # }
+/// ```
+#[macro_export]
+macro_rules! bail {
+ ($e:expr) => {
+ return Err($e);
+ };
+}
+
+/// Return early with an error if a condition is not satisfied.
+///
+/// Analogously to `assert!`, `ensure!` takes a condition and exits the function
+/// if the condition fails. Unlike `assert!`, `ensure!` returns an `Error`
+/// rather than panicking.
+///
+/// Modelled after [anyhow::ensure].
+///
+/// # Example
+///
+/// ```
+/// use homestar_core::{workflow, ensure, Unit};
+///
+/// #
+/// # fn main() -> Result<(), workflow::Error> {
+/// # let user = 1;
+/// #
+/// ensure!(
+/// user < 2,
+/// workflow::Error::ConditionNotMet(
+/// "only user 0 and 1 are allowed".to_string()
+/// )
+/// );
+/// # Ok(())
+/// # }
+/// ```
+#[macro_export(local_inner_macros)]
+macro_rules! ensure {
+ ($cond:expr, $e:expr) => {
+ if !($cond) {
+ bail!($e);
+ }
+ };
+}
diff --git a/homestar-core/src/unit.rs b/homestar-core/src/unit.rs
index af3fe731..d6418588 100644
--- a/homestar-core/src/unit.rs
+++ b/homestar-core/src/unit.rs
@@ -4,10 +4,12 @@
//! [Tasks]: crate::workflow::Task
//! [Inputs]: crate::workflow::Input
//! [Invocations]: crate::workflow::Invocation
+//!
use crate::workflow::{
+ error::InputParseError,
input::{self, Args, Parsed},
- Input,
+ Error, Input,
};
use libipld::Ipld;
@@ -30,7 +32,7 @@ impl From for Unit {
// Default implementation.
impl input::Parse for Input {
- fn parse(&self) -> anyhow::Result> {
+ fn parse(&self) -> Result, InputParseError> {
let args = match Ipld::try_from(self.to_owned())? {
Ipld::List(v) => Ipld::List(v).try_into()?,
ipld => Args::new(vec![ipld.try_into()?]),
@@ -39,3 +41,9 @@ impl input::Parse for Input {
Ok(Parsed::with(args))
}
}
+
+impl From> for InputParseError {
+ fn from(err: Error) -> Self {
+ InputParseError::WorkflowError(err.into())
+ }
+}
diff --git a/homestar-core/src/workflow.rs b/homestar-core/src/workflow.rs
index 68a34ca1..7127ca37 100644
--- a/homestar-core/src/workflow.rs
+++ b/homestar-core/src/workflow.rs
@@ -2,8 +2,21 @@
//!
//! [Ucan invocation]:
+use self::Error as WorkflowError;
+use crate::{bail, Unit, DAG_CBOR};
+use libipld::{
+ cbor::DagCborCodec,
+ json::DagJsonCodec,
+ multihash::{Code, MultihashDigest},
+ prelude::Codec,
+ serde::from_ipld,
+ Cid, Ipld,
+};
+use std::collections::BTreeMap;
+
mod ability;
pub mod config;
+pub mod error;
pub mod input;
pub mod instruction;
mod instruction_result;
@@ -16,6 +29,7 @@ pub mod receipt;
pub mod task;
pub use ability::*;
+pub use error::Error;
pub use input::Input;
pub use instruction::Instruction;
pub use instruction_result::*;
@@ -26,9 +40,177 @@ pub use pointer::Pointer;
pub use receipt::Receipt;
pub use task::Task;
+const TASKS_KEY: &str = "tasks";
+
/// Generic link, cid => T [IndexMap] for storing
/// invoked, raw values in-memory and using them to
/// resolve other steps within a runtime's workflow.
///
/// [IndexMap]: indexmap::IndexMap
pub type LinkMap = indexmap::IndexMap;
+
+/// Workflow composed of [tasks].
+///
+/// [tasks]: Task
+#[derive(Debug, Clone, PartialEq)]
+pub struct Workflow<'a, T> {
+ tasks: Vec>,
+}
+
+impl<'a, T> Workflow<'a, T> {
+ /// Create a new [Workflow] given a set of tasks.
+ pub fn new(tasks: Vec>) -> Self {
+ Self { tasks }
+ }
+
+ /// Return a [Workflow]'s [tasks] vector.
+ ///
+ /// [tasks]: Task
+ pub fn tasks(self) -> Vec> {
+ self.tasks
+ }
+
+ /// Return a reference to [Workflow]'s [tasks] vector.
+ ///
+ /// [tasks]: Task
+ pub fn tasks_ref(&self) -> &Vec> {
+ &self.tasks
+ }
+
+ /// Length of workflow given a series of [tasks].
+ ///
+ /// [tasks]: Task
+ pub fn len(&self) -> u32 {
+ self.tasks.len() as u32
+ }
+
+ /// Whether [Workflow] contains [tasks] or not.
+ ///
+ /// [tasks]: Task
+ pub fn is_empty(&self) -> bool {
+ self.tasks.is_empty()
+ }
+
+ /// Return workflow as stringified Json.
+ pub fn to_json(self) -> Result>
+ where
+ Ipld: From>,
+ {
+ let encoded = DagJsonCodec.encode(&Ipld::from(self))?;
+ let s = std::str::from_utf8(&encoded)?;
+ Ok(s.to_string())
+ }
+}
+
+impl<'a, T> From> for Ipld
+where
+ Ipld: From>,
+{
+ fn from(workflow: Workflow<'a, T>) -> Self {
+ Ipld::Map(BTreeMap::from([(
+ TASKS_KEY.into(),
+ Ipld::List(
+ workflow
+ .tasks
+ .into_iter()
+ .map(Ipld::from)
+ .collect::>(),
+ ),
+ )]))
+ }
+}
+
+impl<'a, T> TryFrom for Workflow<'a, T>
+where
+ T: From,
+{
+ type Error = WorkflowError;
+
+ fn try_from(ipld: Ipld) -> Result {
+ let map = from_ipld::>(ipld)?;
+ let ipld = map
+ .get(TASKS_KEY)
+ .ok_or_else(|| WorkflowError::::MissingFieldError(TASKS_KEY.to_string()))?;
+
+ let tasks = if let Ipld::List(tasks) = ipld {
+ tasks.iter().try_fold(vec![], |mut acc, ipld| {
+ acc.push(ipld.to_owned().try_into()?);
+ Ok::<_, Self::Error>(acc)
+ })?
+ } else {
+ bail!(WorkflowError::not_an_ipld_list());
+ };
+
+ Ok(Self { tasks })
+ }
+}
+
+impl<'a, T> TryFrom> for Cid
+where
+ Ipld: From>,
+{
+ type Error = WorkflowError;
+
+ fn try_from(workflow: Workflow<'a, T>) -> Result {
+ let ipld: Ipld = workflow.into();
+ let bytes = DagCborCodec.encode(&ipld)?;
+ let hash = Code::Sha3_256.digest(&bytes);
+ Ok(Cid::new_v1(DAG_CBOR, hash))
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::{
+ test_utils,
+ workflow::{config::Resources, instruction::RunInstruction, prf::UcanPrf},
+ Unit,
+ };
+
+ #[test]
+ fn workflow_to_json() {
+ let config = Resources::default();
+ let (instruction1, instruction2, _) =
+ test_utils::workflow::related_wasm_instructions::();
+ let task1 = Task::new(
+ RunInstruction::Expanded(instruction1),
+ config.clone().into(),
+ UcanPrf::default(),
+ );
+ let task2 = Task::new(
+ RunInstruction::Expanded(instruction2),
+ config.into(),
+ UcanPrf::default(),
+ );
+
+ let workflow = Workflow::new(vec![task1.clone(), task2.clone()]);
+
+ let json_string = workflow.to_json().unwrap();
+
+ let json_val = json::from(json_string.clone());
+ assert_eq!(json_string, json_val.to_string());
+ }
+
+ #[test]
+ fn ipld_roundtrip_workflow() {
+ let config = Resources::default();
+ let (instruction1, instruction2, _) =
+ test_utils::workflow::related_wasm_instructions::();
+ let task1 = Task::new(
+ RunInstruction::Expanded(instruction1),
+ config.clone().into(),
+ UcanPrf::default(),
+ );
+ let task2 = Task::new(
+ RunInstruction::Expanded(instruction2),
+ config.into(),
+ UcanPrf::default(),
+ );
+
+ let workflow = Workflow::new(vec![task1.clone(), task2.clone()]);
+ let ipld = Ipld::from(workflow.clone());
+ let ipld_to_workflow = ipld.try_into().unwrap();
+ assert_eq!(workflow, ipld_to_workflow);
+ }
+}
diff --git a/homestar-core/src/workflow/ability.rs b/homestar-core/src/workflow/ability.rs
index 421a6b41..d76f1388 100644
--- a/homestar-core/src/workflow/ability.rs
+++ b/homestar-core/src/workflow/ability.rs
@@ -3,6 +3,7 @@
//! [Resource]: url::Url
//! [UCAN Ability]:
+use crate::{workflow, Unit};
use libipld::{serde::from_ipld, Ipld};
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, fmt};
@@ -63,7 +64,7 @@ impl From for Ipld {
}
impl TryFrom for Ability {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
let ability = from_ipld::(ipld)?;
diff --git a/homestar-core/src/workflow/config.rs b/homestar-core/src/workflow/config.rs
index ea940576..f9e12a06 100644
--- a/homestar-core/src/workflow/config.rs
+++ b/homestar-core/src/workflow/config.rs
@@ -4,6 +4,7 @@
//! [workflow]: super
//! [Invocations]: super::Invocation
+use crate::{workflow, Unit};
use libipld::{serde::from_ipld, Ipld};
use serde::{Deserialize, Serialize};
use std::{collections::BTreeMap, default::Default, time::Duration};
@@ -76,7 +77,7 @@ impl From for Ipld {
}
impl<'a> TryFrom<&'a Ipld> for Resources {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: &Ipld) -> Result {
Resources::try_from(ipld.to_owned())
@@ -84,7 +85,7 @@ impl<'a> TryFrom<&'a Ipld> for Resources {
}
impl TryFrom for Resources {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
let map = from_ipld::>(ipld)?;
diff --git a/homestar-core/src/workflow/error.rs b/homestar-core/src/workflow/error.rs
new file mode 100644
index 00000000..990770de
--- /dev/null
+++ b/homestar-core/src/workflow/error.rs
@@ -0,0 +1,163 @@
+//! Error types and implementations for [Workflow] interaction(s).
+//!
+//! [Workflow]: crate::Workflow
+
+use crate::{
+ workflow::{input::Args, Input},
+ Unit,
+};
+use libipld::Ipld;
+use serde::de::Error as DeError;
+
+/// Generic error type for [Workflow] use cases.
+///
+/// [Workflow]: crate::Workflow
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+ /// Error encoding structure to a [Cid].
+ ///
+ /// [Cid]: libipld::cid::Cid
+ #[error("failed to encode CID: {0}")]
+ CidError(#[from] libipld::cid::Error),
+ /// Error thrown when condition or dynamic check is not met.
+ #[error("condition not met: {0}")]
+ ConditionNotMet(String),
+ /// Failure to decode/encode from/to DagCbor.
+ ///
+ /// The underlying error is a [anyhow::Error], per the
+ /// [DagCborCodec] implementation.
+ ///
+ /// [DagCborCodec]: libipld::cbor::DagCborCodec
+ #[error("failed to decode/encode DAG-CBOR: {0}")]
+ DagCborTranslationError(#[from] anyhow::Error),
+ /// Error converting from [Ipld] structure.
+ #[error("cannot convert from Ipld structure: {0}")]
+ FromIpldError(#[from] libipld::error::SerdeError),
+ /// Invalid match discriminant or enumeration.
+ #[error("invalid discriminant {0:#?}")]
+ InvalidDiscriminant(T),
+ /// Error related to a missing a field in a structure or key
+ /// in a map.
+ #[error("no {0} field set")]
+ MissingFieldError(String),
+ /// Error during parsing a [Url].
+ ///
+ /// Transparently forwards from [url::ParseError]'s `source` and
+ /// `Display` methods through to an underlying error.
+ ///
+ /// [Url]: url::Url
+ #[error(transparent)]
+ ParseResourceError(#[from] url::ParseError),
+ /// Generic unknown error.
+ #[error("unknown error")]
+ UnknownError,
+ /// Error when attempting to interpret a sequence of [u8]
+ /// as a string.
+ ///
+ /// Transparently forwards from [std::str::Utf8Error]'s `source` and
+ /// `Display` methods through to an underlying error.
+ #[error(transparent)]
+ Utf8Error(#[from] std::str::Utf8Error),
+}
+
+impl Error {
+ /// Return a [SerdeError] when returning an [Ipld] structure
+ /// that's not expected at the call-site.
+ ///
+ /// [SerdeError]: libipld::error::SerdeError
+ pub fn unexpected_ipld(ipld: Ipld) -> Self {
+ Error::FromIpldError(libipld::error::SerdeError::custom(format!(
+ "unexpected Ipld conversion: {ipld:#?}"
+ )))
+ }
+
+ /// Return an `invalid type` [SerdeError] when not matching an expected
+ /// [Ipld] list/sequence type.
+ ///
+ /// [SerdeError]: libipld::error::SerdeError
+ pub fn not_an_ipld_list() -> Self {
+ Error::FromIpldError(libipld::error::SerdeError::invalid_type(
+ serde::de::Unexpected::Seq,
+ &"an Ipld list / sequence",
+ ))
+ }
+}
+
+impl From> for Error {
+ fn from(_err: Error) -> Self {
+ Error::UnknownError
+ }
+}
+
+impl From> for Error {
+ fn from(_err: Error) -> Error {
+ Error::UnknownError
+ }
+}
+
+impl From for Error {
+ fn from(err: std::convert::Infallible) -> Self {
+ match err {}
+ }
+}
+
+/// Error type for parsing [Workflow] [Input]s.
+///
+/// [Workflow]: crate::Workflow
+#[derive(thiserror::Error, Debug)]
+pub enum InputParseError {
+ /// Error converting from [Ipld] structure.
+ #[error("cannot convert from Ipld structure: {0}")]
+ FromIpldError(#[from] libipld::error::SerdeError),
+ /// Error converting from [Ipld] structure into [Args].
+ #[error("cannot convert from Ipld structure into arguments: {0:#?}")]
+ IpldToArgsError(Args),
+ /// Unexpected [Input] in [Task] structure.
+ ///
+ /// [Task]: crate::workflow::Task
+ #[error("unexpected task input: {0:#?}")]
+ UnexpectedTaskInput(Input),
+ /// Bubble-up conversion and other general [Workflow errors].
+ ///
+ /// [Workflow errors]: Error
+ #[error(transparent)]
+ WorkflowError(#[from] Error),
+}
+
+impl From for InputParseError {
+ fn from(err: std::convert::Infallible) -> Self {
+ match err {}
+ }
+}
+
+/// Error type for resolving promised [Cid]s within [Workflow] [Input]s.
+///
+/// [Cid]: libipld::Cid
+/// [Workflow]: crate::Workflow
+#[derive(thiserror::Error, Debug)]
+pub enum ResolveError {
+ /// Generic runtime error.
+ ///
+ /// Transparently forwards from [anyhow::Error]'s `source` and
+ /// `Display` methods through to an underlying error.
+ #[error(transparent)]
+ RuntimeError(#[from] anyhow::Error),
+ /// Transport error when attempting to resolve [Workflow] [Input]'s [Cid].
+ ///
+ /// [Cid]: libipld::Cid
+ /// [Workflow]: crate::Workflow
+ #[error("transport error during resolve phase of input Cid: {0}")]
+ TransportError(String),
+ /// Unable to resolve a [Cid] within a [Workflow]'s [Input].
+ ///
+ /// [Cid]: libipld::Cid
+ /// [Workflow]: crate::Workflow
+ #[error("error resolving input Cid: {0}")]
+ UnresolvedCidError(String),
+}
+
+impl From for ResolveError {
+ fn from(err: std::convert::Infallible) -> Self {
+ match err {}
+ }
+}
diff --git a/homestar-core/src/workflow/input.rs b/homestar-core/src/workflow/input.rs
index 174ce4b9..609e79f5 100644
--- a/homestar-core/src/workflow/input.rs
+++ b/homestar-core/src/workflow/input.rs
@@ -5,101 +5,17 @@
//! [parse]: Parse::parse
//! [resolve]: Args::resolve
-use super::{
+use crate::workflow::{
+ self,
+ error::ResolveError,
pointer::{Await, AwaitResult, ERR_BRANCH, OK_BRANCH, PTR_BRANCH},
InstructionResult, Pointer,
};
-use anyhow::anyhow;
use libipld::{serde::from_ipld, Cid, Ipld};
use std::{collections::btree_map::BTreeMap, result::Result};
-/// Parsed [Args] consisting of [Inputs] for execution flows, as well as an
-/// optional function name/definition.
-///
-/// TODO: Extend via enumeration for singular objects/values.
-///
-/// [Inputs]: super::Input
-#[derive(Clone, Debug, PartialEq)]
-pub struct Parsed {
- args: Args,
- fun: Option,
-}
-
-impl Parsed {
- /// Initiate [Parsed] data structure with only [Args].
- pub fn with(args: Args) -> Self {
- Parsed { args, fun: None }
- }
-
- /// Initiate [Parsed] data structure with a function name and
- /// [Args].
- pub fn with_fn(fun: String, args: Args) -> Self {
- Parsed {
- args,
- fun: Some(fun),
- }
- }
-
- /// Parsed arguments.
- pub fn args(&self) -> &Args {
- &self.args
- }
-
- /// Turn [Parsed] structure into owned [Args].
- pub fn into_args(self) -> Args {
- self.args
- }
-
- /// Parsed function named.
- pub fn fun(&self) -> Option {
- self.fun.as_ref().map(|f| f.to_string())
- }
-}
-
-impl From> for Args {
- fn from(apply: Parsed) -> Self {
- apply.args
- }
-}
-
-/// Interface for [Instruction] implementations, relying on `homestore-core`
-/// to implement custom parsing specifics.
-///
-/// # Example
-///
-/// ```
-/// use homestar_core::{
-/// workflow::{
-/// input::{Args, Parse}, Ability, Input, Instruction,
-/// },
-/// Unit,
-/// };
-/// use libipld::Ipld;
-/// use url::Url;
-///
-/// let wasm = "bafkreidztuwoszw2dfnzufjpsjmzj67x574qcdm2autnhnv43o3t4zmh7i".to_string();
-/// let resource = Url::parse(format!("ipfs://{wasm}").as_str()).unwrap();
-///
-/// let inst = Instruction::unique(
-/// resource,
-/// Ability::from("wasm/run"),
-/// Input::::Ipld(Ipld::List(vec![Ipld::Bool(true)]))
-/// );
-///
-/// let parsed = inst.input().parse().unwrap();
-///
-/// // turn into Args for invocation:
-/// let args: Args = parsed.try_into().unwrap();
-/// ```
-///
-/// [Instruction]: super::Instruction
-pub trait Parse {
- /// Function returning [Parsed] structure for execution/invocation.
- ///
- /// Note: meant to come before the `resolve` step
- /// during runtime execution.
- fn parse(&self) -> anyhow::Result>;
-}
+mod parse;
+pub use parse::*;
/// A list of ordered [Input] arguments/parameters.
#[derive(Clone, Debug, PartialEq)]
@@ -148,9 +64,9 @@ where
/// [awaited promises]: Await
/// [inputs]: Input
/// [resolving Ipld links]: resolve_links
- pub fn resolve(self, lookup_fn: F) -> anyhow::Result
+ pub fn resolve(self, lookup_fn: F) -> Result
where
- F: Fn(Cid) -> anyhow::Result> + Clone,
+ F: Fn(Cid) -> Result, ResolveError> + Clone,
Ipld: From,
T: Clone,
{
@@ -173,9 +89,9 @@ impl TryFrom for Args
where
InstructionResult: TryFrom,
{
- type Error = anyhow::Error;
+ type Error = workflow::Error;
- fn try_from(ipld: Ipld) -> Result {
+ fn try_from(ipld: Ipld) -> Result {
if let Ipld::List(vec) = ipld {
let args = vec
.into_iter()
@@ -192,7 +108,7 @@ where
});
Ok(Args(args))
} else {
- Err(anyhow!("unexpected conversion type"))
+ Err(workflow::Error::not_an_ipld_list())
}
}
}
@@ -230,7 +146,7 @@ impl Input {
/// [resolving Ipld links]: resolve_links
pub fn resolve(self, lookup_fn: F) -> Input
where
- F: Fn(Cid) -> anyhow::Result> + Clone,
+ F: Fn(Cid) -> Result, ResolveError> + Clone,
Ipld: From,
{
match self {
@@ -280,7 +196,7 @@ impl TryFrom for Input
where
T: From,
{
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
let Ok(map) = from_ipld::>(ipld.to_owned()) else {
@@ -304,8 +220,9 @@ where
let instruction = Pointer::try_from(ipld)?;
Ok(Input::Deferred(Await::new(
instruction,
- AwaitResult::result(branch)
- .ok_or_else(|| anyhow!("wrong branch name: {branch}"))?,
+ AwaitResult::result(branch).ok_or_else(|| {
+ workflow::Error::InvalidDiscriminant(branch.to_string())
+ })?,
)))
},
)
@@ -314,7 +231,7 @@ where
fn resolve_args(args: Vec>, lookup_fn: F) -> Vec>
where
- F: Fn(Cid) -> anyhow::Result> + Clone,
+ F: Fn(Cid) -> Result, ResolveError> + Clone,
Ipld: From,
{
let args = args.into_iter().map(|v| v.resolve(lookup_fn.clone()));
@@ -326,7 +243,7 @@ where
/// [awaited promises]: Await
pub fn resolve_links(ipld: Ipld, lookup_fn: F) -> Ipld
where
- F: Fn(Cid) -> anyhow::Result> + Clone,
+ F: Fn(Cid) -> Result, ResolveError> + Clone,
Ipld: From,
{
match ipld {
diff --git a/homestar-core/src/workflow/input/parse.rs b/homestar-core/src/workflow/input/parse.rs
new file mode 100644
index 00000000..bf27c384
--- /dev/null
+++ b/homestar-core/src/workflow/input/parse.rs
@@ -0,0 +1,89 @@
+use crate::workflow::{error::InputParseError, input::Args};
+
+/// Parsed [Args] consisting of [Inputs] for execution flows, as well as an
+/// optional function name/definition.
+///
+/// TODO: Extend via enumeration for singular objects/values.
+///
+/// [Inputs]: super::Input
+#[derive(Clone, Debug, PartialEq)]
+pub struct Parsed {
+ args: Args,
+ fun: Option,
+}
+
+impl Parsed {
+ /// Initiate [Parsed] data structure with only [Args].
+ pub fn with(args: Args) -> Self {
+ Parsed { args, fun: None }
+ }
+
+ /// Initiate [Parsed] data structure with a function name and
+ /// [Args].
+ pub fn with_fn(fun: String, args: Args) -> Self {
+ Parsed {
+ args,
+ fun: Some(fun),
+ }
+ }
+
+ /// Parsed arguments.
+ pub fn args(&self) -> &Args {
+ &self.args
+ }
+
+ /// Turn [Parsed] structure into owned [Args].
+ pub fn into_args(self) -> Args {
+ self.args
+ }
+
+ /// Parsed function name.
+ pub fn fun(&self) -> Option {
+ self.fun.as_ref().map(|f| f.to_string())
+ }
+}
+
+impl From> for Args {
+ fn from(apply: Parsed) -> Self {
+ apply.args
+ }
+}
+
+/// Interface for [Instruction] implementations, relying on `homestar-core`
+/// to implement custom parsing specifics.
+///
+/// # Example
+///
+/// ```
+/// use homestar_core::{
+/// workflow::{
+/// input::{Args, Parse}, Ability, Input, Instruction,
+/// },
+/// Unit,
+/// };
+/// use libipld::Ipld;
+/// use url::Url;
+///
+/// let wasm = "bafkreidztuwoszw2dfnzufjpsjmzj67x574qcdm2autnhnv43o3t4zmh7i".to_string();
+/// let resource = Url::parse(format!("ipfs://{wasm}").as_str()).unwrap();
+///
+/// let inst = Instruction::unique(
+/// resource,
+/// Ability::from("wasm/run"),
+/// Input::::Ipld(Ipld::List(vec![Ipld::Bool(true)]))
+/// );
+///
+/// let parsed = inst.input().parse().unwrap();
+///
+/// // turn into Args for invocation:
+/// let args: Args = parsed.try_into().unwrap();
+/// ```
+///
+/// [Instruction]: crate::workflow::Instruction
+pub trait Parse {
+ /// Function returning [Parsed] structure for execution/invocation.
+ ///
+ /// Note: meant to come before the `resolve` step
+ /// during runtime execution.
+ fn parse(&self) -> Result, InputParseError>;
+}
diff --git a/homestar-core/src/workflow/instruction.rs b/homestar-core/src/workflow/instruction.rs
index efe66403..a413c1d0 100644
--- a/homestar-core/src/workflow/instruction.rs
+++ b/homestar-core/src/workflow/instruction.rs
@@ -1,8 +1,11 @@
//! An [Instruction] is the smallest unit of work that can be requested from a
//! UCAN, described via `resource`, `ability`.
-use super::{Ability, Input, Nonce, Pointer};
-use anyhow::anyhow;
+use crate::{
+ consts::DAG_CBOR,
+ workflow::{Ability, Error as WorkflowError, Input, Nonce, Pointer},
+ Unit,
+};
use libipld::{
cbor::DagCborCodec,
cid::{
@@ -17,7 +20,6 @@ use libipld::{
use std::{borrow::Cow, collections::BTreeMap, fmt};
use url::Url;
-const DAG_CBOR: u64 = 0x71;
const RESOURCE_KEY: &str = "rsc";
const OP_KEY: &str = "op";
const INPUT_KEY: &str = "input";
@@ -43,12 +45,12 @@ impl<'a, T> TryFrom> for Instruction<'a, T>
where
T: fmt::Debug,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError>;
fn try_from(run: RunInstruction<'a, T>) -> Result {
match run {
RunInstruction::Expanded(instruction) => Ok(instruction),
- e => Err(anyhow!("wrong discriminant: {e:?}")),
+ e => Err(WorkflowError::InvalidDiscriminant(e)),
}
}
}
@@ -63,12 +65,12 @@ impl<'a, T> TryFrom> for Pointer
where
T: fmt::Debug,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError>;
fn try_from(run: RunInstruction<'a, T>) -> Result {
match run {
RunInstruction::Ptr(ptr) => Ok(ptr),
- e => Err(anyhow!("wrong discriminant: {e:?}")),
+ e => Err(WorkflowError::InvalidDiscriminant(e)),
}
}
}
@@ -77,12 +79,12 @@ impl<'a, 'b, T> TryFrom<&'b RunInstruction<'a, T>> for &'b Pointer
where
T: fmt::Debug,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError<&'b RunInstruction<'a, T>>;
fn try_from(run: &'b RunInstruction<'a, T>) -> Result {
match run {
RunInstruction::Ptr(ptr) => Ok(ptr),
- e => Err(anyhow!("wrong discriminant: {e:?}")),
+ e => Err(WorkflowError::InvalidDiscriminant(e)),
}
}
}
@@ -91,12 +93,12 @@ impl<'a, 'b, T> TryFrom<&'b RunInstruction<'a, T>> for Pointer
where
T: fmt::Debug,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError<&'b RunInstruction<'a, T>>;
fn try_from(run: &'b RunInstruction<'a, T>) -> Result {
match run {
RunInstruction::Ptr(ptr) => Ok(ptr.to_owned()),
- e => Err(anyhow!("wrong discriminant: {e:?}")),
+ e => Err(WorkflowError::InvalidDiscriminant(e)),
}
}
}
@@ -117,13 +119,13 @@ impl TryFrom for RunInstruction<'_, T>
where
T: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from<'a>(ipld: Ipld) -> Result {
match ipld {
Ipld::Map(_) => Ok(RunInstruction::Expanded(Instruction::try_from(ipld)?)),
Ipld::Link(_) => Ok(RunInstruction::Ptr(Pointer::try_from(ipld)?)),
- _ => Err(anyhow!("unexpected conversion type")),
+ other_ipld => Err(WorkflowError::unexpected_ipld(other_ipld)),
}
}
}
@@ -241,7 +243,7 @@ impl TryFrom> for Pointer
where
Ipld: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(instruction: Instruction<'_, T>) -> Result {
Ok(Pointer::new(Cid::try_from(instruction)?))
@@ -252,7 +254,7 @@ impl TryFrom> for Cid
where
Ipld: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(instruction: Instruction<'_, T>) -> Result {
let ipld: Ipld = instruction.into();
@@ -280,7 +282,7 @@ impl TryFrom<&Ipld> for Instruction<'_, T>
where
T: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(ipld: &Ipld) -> Result {
TryFrom::try_from(ipld.to_owned())
@@ -291,35 +293,37 @@ impl TryFrom for Instruction<'_, T>
where
T: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(ipld: Ipld) -> Result {
let map = from_ipld::>(ipld)?;
let rsc = match map.get(RESOURCE_KEY) {
Some(Ipld::Link(cid)) => cid
- .to_string_of_base(Base::Base32Lower)
- .map_err(|e| anyhow!("failed to encode CID into multibase string: {e}"))
+ .to_string_of_base(Base::Base32Lower) // Cid v1
+ .map_err(WorkflowError::::CidError)
.and_then(|txt| {
Url::parse(format!("{}{}", "ipfs://", txt).as_str())
- .map_err(|e| anyhow!("failed to parse URL: {e}"))
+ .map_err(WorkflowError::ParseResourceError)
}),
Some(Ipld::String(txt)) => {
- Url::parse(txt.as_str()).map_err(|e| anyhow!("failed to parse URL: {e}"))
+ Url::parse(txt.as_str()).map_err(WorkflowError::ParseResourceError)
}
- _ => Err(anyhow!("no resource/with set.")),
+ _ => Err(WorkflowError::MissingFieldError(RESOURCE_KEY.to_string())),
}?;
Ok(Self {
rsc,
op: from_ipld(
map.get(OP_KEY)
- .ok_or_else(|| anyhow!("no `op` field set"))?
+ .ok_or_else(|| WorkflowError::::MissingFieldError(OP_KEY.to_string()))?
.to_owned(),
)?,
input: Input::try_from(
map.get(INPUT_KEY)
- .ok_or_else(|| anyhow!("no `input` field set"))?
+ .ok_or_else(|| {
+ WorkflowError::::MissingFieldError(INPUT_KEY.to_string())
+ })?
.to_owned(),
)?,
nnc: Nonce::try_from(
diff --git a/homestar-core/src/workflow/instruction_result.rs b/homestar-core/src/workflow/instruction_result.rs
index 5674bd0a..4b297aa9 100644
--- a/homestar-core/src/workflow/instruction_result.rs
+++ b/homestar-core/src/workflow/instruction_result.rs
@@ -3,7 +3,7 @@
//!
//! [Instruction]: super::Instruction
-use anyhow::anyhow;
+use crate::{workflow, Unit};
use diesel::{
backend::Backend,
deserialize::{self, FromSql},
@@ -76,9 +76,9 @@ impl TryFrom for InstructionResult
where
T: From,
{
- type Error = anyhow::Error;
+ type Error = workflow::Error;
- fn try_from(ipld: Ipld) -> Result {
+ fn try_from(ipld: Ipld) -> Result> {
if let Ipld::List(v) = ipld {
match &v[..] {
[Ipld::String(result), res] if result == OK => {
@@ -90,10 +90,12 @@ where
[Ipld::String(result), res] if result == JUST => {
Ok(InstructionResult::Just(res.to_owned().try_into()?))
}
- _ => Err(anyhow!("unexpected conversion type")),
+ other_ipld => Err(workflow::Error::unexpected_ipld(
+ other_ipld.to_owned().into(),
+ )),
}
} else {
- Err(anyhow!("not convertible to Ipld"))
+ Err(workflow::Error::not_an_ipld_list())
}
}
}
@@ -102,9 +104,9 @@ impl TryFrom<&Ipld> for InstructionResult
where
T: From,
{
- type Error = anyhow::Error;
+ type Error = workflow::Error;
- fn try_from(ipld: &Ipld) -> Result {
+ fn try_from(ipld: &Ipld) -> Result> {
TryFrom::try_from(ipld.to_owned())
}
}
diff --git a/homestar-core/src/workflow/invocation.rs b/homestar-core/src/workflow/invocation.rs
index e3e75810..fd02bb56 100644
--- a/homestar-core/src/workflow/invocation.rs
+++ b/homestar-core/src/workflow/invocation.rs
@@ -2,8 +2,11 @@
//!
//! [Task]: super::Task
-use super::{Pointer, Task};
-use anyhow::anyhow;
+use crate::{
+ consts::DAG_CBOR,
+ workflow::{Error as WorkflowError, Pointer, Task},
+ Unit,
+};
use libipld::{
cbor::DagCborCodec,
cid::{
@@ -16,7 +19,6 @@ use libipld::{
};
use std::collections::BTreeMap;
-const DAG_CBOR: u64 = 0x71;
const TASK_KEY: &str = "task";
/// A signed [Task] wrapper/container.
@@ -48,7 +50,7 @@ impl TryFrom> for Ipld
where
Ipld: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(invocation: Invocation<'_, T>) -> Result {
let map = Ipld::Map(BTreeMap::from([(
@@ -64,7 +66,7 @@ impl TryFrom for Invocation<'_, T>
where
T: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(ipld: Ipld) -> Result {
let map = from_ipld::>(ipld)?;
@@ -72,7 +74,7 @@ where
Ok(Self {
task: Task::try_from(
map.get(TASK_KEY)
- .ok_or_else(|| anyhow!("no `task` set"))?
+ .ok_or_else(|| WorkflowError::::MissingFieldError(TASK_KEY.to_string()))?
.to_owned(),
)?,
})
@@ -83,7 +85,7 @@ impl TryFrom> for Pointer
where
Ipld: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(invocation: Invocation<'_, T>) -> Result {
Ok(Pointer::new(Cid::try_from(invocation)?))
@@ -94,7 +96,7 @@ impl TryFrom> for Cid
where
Ipld: From,
{
- type Error = anyhow::Error;
+ type Error = WorkflowError;
fn try_from(invocation: Invocation<'_, T>) -> Result {
let ipld: Ipld = invocation.try_into()?;
diff --git a/homestar-core/src/workflow/issuer.rs b/homestar-core/src/workflow/issuer.rs
index 35af0426..ef583904 100644
--- a/homestar-core/src/workflow/issuer.rs
+++ b/homestar-core/src/workflow/issuer.rs
@@ -1,6 +1,7 @@
//! Issuer referring to a principal (principal of least authority) that issues
//! a receipt.
+use crate::{workflow, Unit};
use diesel::{
backend::Backend,
deserialize::{self, FromSql},
@@ -46,7 +47,7 @@ impl From for Ipld {
}
impl TryFrom for Issuer {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
let s = from_ipld::(ipld)?;
diff --git a/homestar-core/src/workflow/nonce.rs b/homestar-core/src/workflow/nonce.rs
index 4603236b..6e1e2847 100644
--- a/homestar-core/src/workflow/nonce.rs
+++ b/homestar-core/src/workflow/nonce.rs
@@ -2,7 +2,7 @@
//!
//! [Instruction]: super::Instruction
-use anyhow::anyhow;
+use crate::{workflow, Unit};
use enum_as_inner::EnumAsInner;
use generic_array::{
typenum::consts::{U12, U16},
@@ -61,7 +61,7 @@ impl From for Ipld {
}
impl TryFrom for Nonce {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
if let Ipld::List(v) = ipld {
@@ -72,7 +72,9 @@ impl TryFrom for Nonce {
[Ipld::Integer(1), Ipld::Bytes(nonce)] => {
Ok(Nonce::Nonce128(*GenericArray::from_slice(nonce)))
}
- _ => Err(anyhow!("unexpected conversion type")),
+ other_ipld => Err(workflow::Error::unexpected_ipld(
+ other_ipld.to_owned().into(),
+ )),
}
} else {
Ok(Nonce::Empty)
@@ -81,7 +83,7 @@ impl TryFrom for Nonce {
}
impl TryFrom<&Ipld> for Nonce {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: &Ipld) -> Result {
TryFrom::try_from(ipld.to_owned())
diff --git a/homestar-core/src/workflow/pointer.rs b/homestar-core/src/workflow/pointer.rs
index 1f4be658..c24c1e5b 100644
--- a/homestar-core/src/workflow/pointer.rs
+++ b/homestar-core/src/workflow/pointer.rs
@@ -8,7 +8,7 @@
//! [Instructions]: super::Instruction
//! [Receipts]: super::Receipt
-use anyhow::ensure;
+use crate::{ensure, workflow, Unit};
use diesel::{
backend::Backend,
deserialize::{self, FromSql},
@@ -122,11 +122,16 @@ impl From<&Await> for Ipld {
}
impl TryFrom for Await {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
let map = from_ipld::>(ipld)?;
- ensure!(map.len() == 1, "unexpected keys inside awaited promise");
+ ensure!(
+ map.len() == 1,
+ workflow::Error::ConditionNotMet(
+ "await promise must have only a single key in a map".to_string()
+ )
+ );
let (key, value) = map.into_iter().next().unwrap();
let instruction = Pointer::try_from(value)?;
@@ -145,7 +150,7 @@ impl TryFrom for Await {
}
impl TryFrom<&Ipld> for Await {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: &Ipld) -> Result {
TryFrom::try_from(ipld.to_owned())
@@ -198,7 +203,7 @@ impl From for Ipld {
}
impl TryFrom for Pointer {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
let s: Cid = from_ipld(ipld)?;
@@ -207,7 +212,7 @@ impl TryFrom for Pointer {
}
impl TryFrom<&Ipld> for Pointer {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: &Ipld) -> Result {
TryFrom::try_from(ipld.to_owned())
diff --git a/homestar-core/src/workflow/prf.rs b/homestar-core/src/workflow/prf.rs
index 333d30bf..66052ba7 100644
--- a/homestar-core/src/workflow/prf.rs
+++ b/homestar-core/src/workflow/prf.rs
@@ -3,6 +3,7 @@
//!
//! [Task]: super::Task
+use crate::{workflow, Unit};
use diesel::{
backend::Backend,
deserialize::{self, FromSql},
@@ -46,7 +47,7 @@ impl From for Ipld {
}
impl TryFrom for UcanPrf {
- type Error = anyhow::Error;
+ type Error = workflow::Error;
fn try_from(ipld: Ipld) -> Result {
if let Ipld::List(inner) = ipld {
@@ -63,7 +64,7 @@ impl TryFrom