From 202b8678da999d10baddb01c8223939711be98a6 Mon Sep 17 00:00:00 2001
From: Lut99
Date: Wed, 7 May 2025 16:07:11 +0200
Subject: [PATCH 01/39] refactor: Separating out the Policy Store, Policy
 Reasoner and API

---
 Cargo.lock                                    |  832 ++++++-----
 Cargo.toml                                    |    3 +-
 Dockerfile.dev                                |  152 --
 Dockerfile.let                                |   55 +
 Dockerfile.rls                                |  192 ++-
 Makefile                                      |   64 +-
 brane-api/src/data.rs                         |    6 +-
 brane-api/src/infra.rs                        |    4 +-
 brane-api/src/packages.rs                     |    2 +-
 brane-ast/src/ast_unresolved.rs               |    5 +-
 brane-ast/src/compile.rs                      |    4 +-
 brane-ast/src/dsl.rs                          |  109 ++
 brane-ast/src/edgebuffer.rs                   |    7 +-
 brane-ast/src/errors.rs                       |    7 +-
 brane-ast/src/lib.rs                          |   13 +-
 brane-ast/src/spec.rs                         |  201 ---
 brane-ast/src/state.rs                        |   82 +-
 brane-ast/src/traversals/compile.rs           |   15 +-
 brane-ast/src/traversals/data.rs              |    4 +-
 brane-ast/src/traversals/print/ast.rs         |    9 +-
 .../src/traversals/print/ast_unresolved.rs    |    4 +-
 brane-ast/src/traversals/resolve.rs           |    6 +-
 brane-ast/src/traversals/typing.rs            |    6 +-
 brane-ast/src/traversals/workflow_resolve.rs  |    4 +-
 brane-ast/src/warnings.rs                     |    6 +-
 brane-cc/src/main.rs                          |    5 +-
 brane-cfg/Cargo.toml                          |    2 +-
 brane-cfg/src/backend.rs                      |    2 +-
 brane-cfg/src/infra.rs                        |    2 +-
 brane-cfg/src/node.rs                         |   25 +-
 brane-cfg/src/proxy.rs                        |   13 +-
 brane-chk/Cargo.toml                          |   59 +
 brane-chk/policy/README.md                    |    6 +
 brane-chk/policy/main.eflint                  |   36 +
 brane-chk/policy/metadata.eflint              |   36 +
 brane-chk/policy/queries.eflint               |   25 +
 brane-chk/policy/state.eflint                 |   30 +
 brane-chk/policy/state_assert.eflint          |   24 +
 .../policy/tests/test_workflow_ext.eflint     |   63 +
 brane-chk/policy/workflow_assert.eflint       |   83 ++
 brane-chk/policy/workflow_base.eflint         |   50 +
 brane-chk/policy/workflow_ext.eflint          |   64 +
 brane-chk/src/apis/deliberation.rs            |  492 +++++++
 brane-chk/src/apis/mod.rs                     |   21 +
 brane-chk/src/apis/reasoner.rs                |  110 ++
 brane-chk/src/lib.rs                          |   22 +
 brane-chk/src/main.rs                         |  204 +++
 brane-chk/src/question.rs                     |   88 ++
 brane-chk/src/reasonerconn.rs                 |   93 ++
 brane-chk/src/state.rs                        |   39 +
 brane-chk/src/stateresolver.rs                |  821 +++++++++++
 brane-chk/src/workflow/compile.rs             |  507 +++++++
 brane-chk/src/workflow/compiler.rs            |  324 +++++
 brane-chk/src/workflow/eflint.rs              |  531 +++++++
 brane-chk/src/workflow/mod.rs                 |   26 +
 brane-chk/src/workflow/preprocess.rs          | 1108 ++++++++++++++
 brane-chk/src/workflow/tests.rs               |  210 +++
 brane-chk/src/workflow/utils.rs               |   30 +
 brane-cli-c/Cargo.toml                        |    2 +-
 brane-cli-c/src/lib.rs                        |   10 +-
 brane-cli/Cargo.toml                          |    2 +-
 brane-cli/src/check.rs                        |    5 +-
 brane-cli/src/data.rs                         |   12 +-
 brane-cli/src/errors.rs                       |    8 +-
 brane-cli/src/instance.rs                     |   20 +-
 brane-cli/src/planner.rs                      |    7 +-
 brane-cli/src/registry.rs                     |    6 +-
 brane-cli/src/repl.rs                         |  176 ++-
 brane-cli/src/run.rs                          |   28 +-
 brane-cli/src/test.rs                         |    4 +-
 brane-cli/src/vm.rs                           |    8 +-
 brane-ctl/Cargo.toml                          |   15 +-
 brane-ctl/src/cli.rs                          |   13 +-
 brane-ctl/src/errors.rs                       |   10 +-
 brane-ctl/src/generate.rs                     |  134 +-
 brane-ctl/src/lifetime.rs                     |   43 +-
 brane-ctl/src/main.rs                         |    6 +-
 brane-ctl/src/policies.rs                     |  371 +++--
 brane-ctl/src/spec.rs                         |   25 +-
 brane-ctl/src/upgrade.rs                      |   38 +-
 brane-ctl/src/wizard.rs                       |    6 +-
 brane-drv/Cargo.toml                          |    3 +-
 brane-drv/src/check.rs                        |   98 +-
 brane-drv/src/handler.rs                      |    4 +-
 brane-drv/src/planner.rs                      |    6 +-
 brane-drv/src/vm.rs                           |   22 +-
 brane-dsl/src/data_type.rs                    |   24 +-
 brane-dsl/src/spec.rs                         |   62 +-
 brane-exe/Cargo.toml                          |    1 -
 brane-exe/src/dummy.rs                        |   10 +-
 brane-exe/src/errors.rs                       |   10 +-
 brane-exe/src/frame_stack.rs                  |   10 +-
 brane-exe/src/lib.rs                          |    3 +-
 brane-exe/src/spec.rs                         |    8 +-
 brane-exe/src/thread.rs                       |   15 +-
 brane-exe/src/value.rs                        |   10 +-
 brane-exe/src/vm.rs                           |    6 +-
 brane-job/Cargo.toml                          |    4 +-
 brane-job/src/cli.rs                          |    3 +
 brane-job/src/main.rs                         |    9 +-
 brane-job/src/worker.rs                       |  328 ++---
 brane-let/Cargo.toml                          |    2 -
 brane-let/src/common.rs                       |    4 +-
 brane-let/src/errors.rs                       |    4 +-
 brane-log/Cargo.toml                          |   32 +
 brane-plr/Cargo.toml                          |    4 +-
 brane-plr/src/planner.rs                      |   55 +-
 brane-prx/Cargo.toml                          |    2 +-
 brane-prx/src/client.rs                       |   20 +-
 brane-prx/src/manage.rs                       |    4 +-
 brane-reg/Cargo.toml                          |    4 +-
 brane-reg/src/check.rs                        |   17 +-
 brane-reg/src/cli.rs                          |    4 +
 brane-reg/src/data.rs                         |   65 +-
 brane-reg/src/main.rs                         |    4 +-
 brane-reg/src/spec.rs                         |    4 +-
 brane-tsk/Cargo.toml                          |    1 -
 brane-tsk/src/caches.rs                       |    6 +-
 brane-tsk/src/docker.rs                       |  143 +-
 brane-tsk/src/errors.rs                       |   45 +-
 brane-tsk/src/input.rs                        |   10 +-
 brane-tsk/src/spec.rs                         |    4 +-
 docker-compose-central.yml                    |    2 -
 docker-compose-proxy.yml                      |    2 -
 docker-compose-worker.yml                     |   29 +-
 specifications/Cargo.toml                     |    8 +-
 specifications/src/address.rs                 |  729 ++++++----
 specifications/src/checking.rs                |  591 +++++++-
 specifications/src/driving.rs                 |    4 +-
 specifications/src/lib.rs                     |    4 +-
 {brane-exe => specifications}/src/pc.rs       |   14 +-
 specifications/src/registering.rs             |    7 +-
 specifications/src/wir/builtins.rs            |  116 ++
 .../src/wir}/data_type.rs                     |   76 +-
 .../src => specifications/src/wir}/func_id.rs |    4 +-
 .../src/wir}/locations.rs                     |   13 +-
 specifications/src/wir/merge_strategy.rs      |   76 +
 .../ast.rs => specifications/src/wir/mod.rs   |  201 +--
 specifications/src/working.rs                 |   72 +-
 tests/eflint/tautology.eflint                 |    9 +-
 tests/wir/arrays.json                         |  716 +++++++++
 tests/wir/attributes.json                     |  562 ++++++++
 tests/wir/average.json                        |  235 +++
 tests/wir/call.json                           |  204 +++
 tests/wir/class.json                          |  285 ++++
 tests/wir/comments.json                       |   98 ++
 tests/wir/cutoff.json                         |  589 ++++++++
 tests/wir/data.json                           |  384 +++++
 tests/wir/data_complex.json                   | 1274 +++++++++++++++++
 tests/wir/delayed_initialization.json         |  143 ++
 tests/wir/eflint/README.md                    |   13 +
 tests/wir/eflint/arrays.eflint                |    3 +
 tests/wir/eflint/attributes.eflint            |  154 ++
 tests/wir/eflint/average.eflint               |    9 +
 tests/wir/eflint/call.eflint                  |    1 +
 tests/wir/eflint/comments.eflint              |    1 +
 tests/wir/eflint/cutoff.eflint                |    4 +
 tests/wir/eflint/data.eflint                  |   20 +
 tests/wir/eflint/data_complex.eflint          |  114 ++
 .../wir/eflint/delayed_initialization.eflint  |    1 +
 tests/wir/eflint/empty.eflint                 |    1 +
 tests/wir/eflint/epi.eflint                   |   24 +
 tests/wir/eflint/epi_one.eflint               |   18 +
 tests/wir/eflint/for.eflint                   |    5 +
 tests/wir/eflint/function.eflint              |    1 +
 tests/wir/eflint/hello_world.eflint           |    4 +
 tests/wir/eflint/if.eflint                    |    1 +
 tests/wir/eflint/if_complex.eflint            |    1 +
 tests/wir/eflint/import.eflint                |    4 +
 tests/wir/eflint/math.eflint                  |    1 +
 tests/wir/eflint/metadata.eflint              |    4 +
 tests/wir/eflint/on.eflint                    |   13 +
 tests/wir/eflint/parallel.eflint              |   11 +
 tests/wir/eflint/scopes.eflint                |    1 +
 tests/wir/eflint/vars.eflint                  |    1 +
 tests/wir/eflint/while.eflint                 |    3 +
 tests/wir/empty.json                          |   98 ++
 tests/wir/epi.json                            |  306 ++++
 tests/wir/epi_one.json                        |  265 ++++
 tests/wir/for.json                            |  479 +++++++
 tests/wir/function.json                       |  377 +++++
 tests/wir/hello_world.json                    |  139 ++
 tests/wir/if.json                             |  303 ++++
 tests/wir/if_complex.json                     |  703 +++++++++
 tests/wir/import.json                         |  159 ++
 tests/wir/math.json                           |  229 +++
 tests/wir/metadata.json                       |  143 ++
 tests/wir/on.json                             |  306 ++++
 tests/wir/parallel.json                       |  456 ++++++
 tests/wir/recursion.json                      |  655 +++++++++
 tests/wir/scopes.json                         |  219 +++
 tests/wir/vars.json                           |  189 +++
 tests/wir/while.json                          |  185 +++
 193 files changed, 18679 insertions(+), 2354 deletions(-)
 delete mode 100644 Dockerfile.dev
 create mode 100644 Dockerfile.let
 create mode 100644 brane-ast/src/dsl.rs
 delete mode 100644 brane-ast/src/spec.rs
 create mode 100644 brane-chk/Cargo.toml
 create mode 100644 brane-chk/policy/README.md
 create mode 100644 brane-chk/policy/main.eflint
 create mode 100644 brane-chk/policy/metadata.eflint
 create mode 100644 brane-chk/policy/queries.eflint
 create mode 100644 brane-chk/policy/state.eflint
 create mode 100644 brane-chk/policy/state_assert.eflint
 create mode 100644 brane-chk/policy/tests/test_workflow_ext.eflint
 create mode 100644 brane-chk/policy/workflow_assert.eflint
 create mode 100644 brane-chk/policy/workflow_base.eflint
 create mode 100644 brane-chk/policy/workflow_ext.eflint
 create mode 100644 brane-chk/src/apis/deliberation.rs
 create mode 100644 brane-chk/src/apis/mod.rs
 create mode 100644 brane-chk/src/apis/reasoner.rs
 create mode 100644 brane-chk/src/lib.rs
 create mode 100644 brane-chk/src/main.rs
 create mode 100644 brane-chk/src/question.rs
 create mode 100644 brane-chk/src/reasonerconn.rs
 create mode 100644 brane-chk/src/state.rs
 create mode 100644 brane-chk/src/stateresolver.rs
 create mode 100644 brane-chk/src/workflow/compile.rs
 create mode 100644 brane-chk/src/workflow/compiler.rs
 create mode 100644 brane-chk/src/workflow/eflint.rs
 create mode 100644 brane-chk/src/workflow/mod.rs
 create mode 100644 brane-chk/src/workflow/preprocess.rs
 create mode 100644 brane-chk/src/workflow/tests.rs
 create mode 100644 brane-chk/src/workflow/utils.rs
 create mode 100644 brane-log/Cargo.toml
 rename {brane-exe => specifications}/src/pc.rs (97%)
 create mode 100644 specifications/src/wir/builtins.rs
 rename {brane-ast/src => specifications/src/wir}/data_type.rs (73%)
 rename {brane-ast/src => specifications/src/wir}/func_id.rs (99%)
 rename {brane-ast/src => specifications/src/wir}/locations.rs (80%)
 create mode 100644 specifications/src/wir/merge_strategy.rs
 rename brane-ast/src/ast.rs => specifications/src/wir/mod.rs (85%)
 create mode 100644 tests/wir/arrays.json
 create mode 100644 tests/wir/attributes.json
 create mode 100644 tests/wir/average.json
 create mode 100644 tests/wir/call.json
 create mode 100644 tests/wir/class.json
 create mode 100644 tests/wir/comments.json
 create mode 100644 tests/wir/cutoff.json
 create mode 100644 tests/wir/data.json
 create mode 100644 tests/wir/data_complex.json
 create mode 100644 tests/wir/delayed_initialization.json
 create mode 100644 tests/wir/eflint/README.md
 create mode 100644 tests/wir/eflint/arrays.eflint
 create mode 100644 tests/wir/eflint/attributes.eflint
 create mode 100644 tests/wir/eflint/average.eflint
 create mode 100644 tests/wir/eflint/call.eflint
 create mode 100644 tests/wir/eflint/comments.eflint
 create mode 100644 tests/wir/eflint/cutoff.eflint
 create mode 100644 tests/wir/eflint/data.eflint
 create mode 100644 tests/wir/eflint/data_complex.eflint
 create mode 100644 tests/wir/eflint/delayed_initialization.eflint
 create mode 100644 tests/wir/eflint/empty.eflint
 create mode 100644 tests/wir/eflint/epi.eflint
 create mode 100644 tests/wir/eflint/epi_one.eflint
 create mode 100644 tests/wir/eflint/for.eflint
 create mode 100644 tests/wir/eflint/function.eflint
 create mode 100644 tests/wir/eflint/hello_world.eflint
 create mode 100644 tests/wir/eflint/if.eflint
 create mode 100644 tests/wir/eflint/if_complex.eflint
 create mode 100644 tests/wir/eflint/import.eflint
 create mode 100644 tests/wir/eflint/math.eflint
 create mode 100644 tests/wir/eflint/metadata.eflint
 create mode 100644 tests/wir/eflint/on.eflint
 create mode 100644 tests/wir/eflint/parallel.eflint
 create mode 100644 tests/wir/eflint/scopes.eflint
 create mode 100644
tests/wir/eflint/vars.eflint create mode 100644 tests/wir/eflint/while.eflint create mode 100644 tests/wir/empty.json create mode 100644 tests/wir/epi.json create mode 100644 tests/wir/epi_one.json create mode 100644 tests/wir/for.json create mode 100644 tests/wir/function.json create mode 100644 tests/wir/hello_world.json create mode 100644 tests/wir/if.json create mode 100644 tests/wir/if_complex.json create mode 100644 tests/wir/import.json create mode 100644 tests/wir/math.json create mode 100644 tests/wir/metadata.json create mode 100644 tests/wir/on.json create mode 100644 tests/wir/parallel.json create mode 100644 tests/wir/recursion.json create mode 100644 tests/wir/scopes.json create mode 100644 tests/wir/vars.json create mode 100644 tests/wir/while.json diff --git a/Cargo.lock b/Cargo.lock index 4bee7eb0..73a7bc76 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -241,34 +241,6 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" -[[package]] -name = "audit-logger" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" -dependencies = [ - "async-trait", - "auth-resolver", - "deliberation", - "enum-debug", - "hex", - "policy", - "serde", - "serde_json", - "state-resolver", - "warp", - "workflow", -] - -[[package]] -name = "auth-resolver" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" -dependencies = [ - "async-trait", - "serde", - "warp", -] - [[package]] name = "auto_enums" version = "0.8.7" @@ -294,24 +266,65 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", - "axum-core", + "axum-core 0.4.5", + "bytes", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "itoa", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "tokio", + "tower 0.5.2", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" +dependencies = [ + "axum-core 0.5.2", "bytes", + "form_urlencoded", "futures-util", "http 1.3.1", "http-body 1.0.1", "http-body-util", + "hyper 1.6.0", + "hyper-util", "itoa", - "matchit", + "matchit 0.8.4", "memchr", "mime", "percent-encoding", "pin-project-lite", "rustversion", "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", "sync_wrapper 1.0.2", + "tokio", "tower 0.5.2", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -332,6 +345,59 @@ dependencies = [ "sync_wrapper 1.0.2", "tower-layer", "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" +dependencies = [ + "bytes", + "futures-core", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.2", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-server" 
+version = "0.2.0" +source = "git+https://github.com/BraneFramework/policy-store#8afd377d3562b6159496691e4165304a41b38e17" +dependencies = [ + "axum 0.8.4", + "axum-server-spec", + "error-trace 3.3.1", + "futures", + "hyper 1.6.0", + "hyper-util", + "serde", + "serde_json", + "specifications 0.1.0", + "thiserror 2.0.12", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-server-spec" +version = "0.2.0" +source = "git+https://github.com/BraneFramework/policy-store#8afd377d3562b6159496691e4165304a41b38e17" +dependencies = [ + "axum 0.8.4", + "http 1.3.1", + "itertools 0.14.0", + "serde", + "specifications 0.1.0", ] [[package]] @@ -349,6 +415,21 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "backtrace-ext" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" +dependencies = [ + "backtrace", +] + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + [[package]] name = "base64" version = "0.13.1" @@ -456,14 +537,14 @@ dependencies = [ "async-compression", "brane-cfg", "brane-prx", - "brane-shr 3.0.0", + "brane-shr", "bytes", "chrono", "clap", "dotenvy", "enum-debug", "env_logger 0.11.7", - "error-trace", + "error-trace 3.0.0", "juniper", "juniper_warp", "log", @@ -486,8 +567,8 @@ dependencies = [ name = "brane-ast" version = "3.0.0" dependencies = [ - "brane-dsl 3.0.0", - "brane-shr 3.0.0", + "brane-dsl", + "brane-shr", "console", "enum-debug", "lazy_static", @@ -502,34 +583,13 @@ dependencies = [ "uuid", ] -[[package]] -name = "brane-ast" -version = "3.0.0" -source = "git+https://github.com/braneframework/brane#07430945248a2fd2dc2cc99b6c8477c47ae2f4d2" -dependencies = [ - "brane-dsl 3.0.0 (git+https://github.com/braneframework/brane)", - "brane-shr 3.0.0 (git+https://github.com/braneframework/brane)", - "console", - "enum-debug", - "lazy_static", - "log", - "num-traits", - "rand 0.9.0", - "serde", - "serde_json_any_key", - "specifications 3.0.0 (git+https://github.com/braneframework/brane)", - "strum 0.27.1", - "thiserror 2.0.12", - "uuid", -] - [[package]] name = "brane-cc" version = "3.0.0" dependencies = [ - "brane-ast 3.0.0", - "brane-dsl 3.0.0", - "brane-shr 3.0.0", + "brane-ast", + "brane-dsl", + "brane-shr", "brane-tsk", "clap", "dotenvy", @@ -562,6 +622,39 @@ dependencies = [ "x509-parser", ] +[[package]] +name = "brane-chk" +version = "3.0.0" +dependencies = [ + "axum 0.7.9", + "base16ct", + "brane-ast", + "brane-cfg", + "brane-shr", + "brane-tsk", + "clap", + "enum-debug", + "error-trace 3.3.0", + "futures", + "humanlog", + "hyper 1.6.0", + "hyper-util", + "names 0.1.0 (git+https://github.com/Lut99/names-rs)", + "policy-reasoner", + "policy-store", + "rand 0.8.5", + "reqwest 0.12.14", + "serde", + "serde_json", + "shlex", + "specifications 3.0.0", + "thiserror 1.0.69", + "tokio", + "tower-service", + "tracing", + "tracing-subscriber", +] + [[package]] name = "brane-cli" version = "3.0.0" @@ -570,11 +663,11 @@ dependencies = [ "async-trait", "base64 0.22.1", "bollard", - "brane-ast 3.0.0", + "brane-ast", "brane-cfg", - "brane-dsl 3.0.0", - "brane-exe 3.0.0", - "brane-shr 3.0.0", + "brane-dsl", + "brane-exe", + "brane-shr", "brane-tsk", "chrono", "clap", @@ -583,7 +676,7 @@ dependencies = [ "dirs 6.0.0", "dotenvy", "enum-debug", - "error-trace", + "error-trace 3.0.0", "flate2", "fs_extra", 
"futures-util", @@ -594,10 +687,10 @@ dependencies = [ "indicatif", "lazy_static", "log", - "names", + "names 0.1.0 (git+https://github.com/Lut99/names-rs?tag=v0.1.0)", "parking_lot", "path-clean", - "prettytable-rs", + "prettytable", "rand 0.9.0", "reqwest 0.12.14", "rustls 0.21.12", @@ -622,9 +715,9 @@ dependencies = [ name = "brane-cli-c" version = "3.0.0" dependencies = [ - "brane-ast 3.0.0", + "brane-ast", "brane-cli", - "brane-exe 3.0.0", + "brane-exe", "brane-tsk", "console", "humanlog", @@ -639,22 +732,22 @@ dependencies = [ name = "brane-ctl" version = "3.0.0" dependencies = [ + "base16ct", "base64ct", "bollard", "brane-cfg", - "brane-shr 3.0.0", + "brane-shr", "brane-tsk", + "chrono", "clap", "console", "dialoguer", - "diesel", "diesel_migrations", "dirs 6.0.0", "dotenvy", "download", - "eflint-to-json", "enum-debug", - "error-trace", + "error-trace 3.0.0", "hex-literal", "human-panic 2.0.2", "humanlog", @@ -662,8 +755,8 @@ dependencies = [ "jsonwebtoken", "lazy_static", "log", - "names", - "policy", + "names 0.1.0 (git+https://github.com/Lut99/names-rs?tag=v0.1.0)", + "policy-store", "rand 0.9.0", "reqwest 0.12.14", "serde", @@ -671,7 +764,6 @@ dependencies = [ "serde_yml", "shlex", "specifications 3.0.0", - "srv", "tempfile", "thiserror 2.0.12", "tokio", @@ -682,19 +774,20 @@ name = "brane-drv" version = "3.0.0" dependencies = [ "async-trait", - "brane-ast 3.0.0", "brane-cfg", - "brane-exe 3.0.0", + "brane-exe", "brane-prx", - "brane-shr 3.0.0", + "brane-shr", "brane-tsk", "clap", "dashmap 6.1.0", "dotenvy", "enum-debug", "env_logger 0.11.7", - "error-trace", + "error-trace 3.0.0", "log", + "policy-reasoner", + "prost 0.12.6", "reqwest 0.12.14", "serde_json", "serde_json_any_key", @@ -709,7 +802,7 @@ dependencies = [ name = "brane-dsl" version = "3.0.0" dependencies = [ - "brane-shr 3.0.0", + "brane-shr", "enum-debug", "log", "nom", @@ -721,23 +814,6 @@ dependencies = [ "thiserror 2.0.12", ] -[[package]] -name = "brane-dsl" -version = "3.0.0" -source = "git+https://github.com/braneframework/brane#07430945248a2fd2dc2cc99b6c8477c47ae2f4d2" -dependencies = [ - "brane-shr 3.0.0 (git+https://github.com/braneframework/brane)", - "enum-debug", - "log", - "nom", - "nom_locate", - "rand 0.9.0", - "regex", - "serde", - "specifications 3.0.0 (git+https://github.com/braneframework/brane)", - "thiserror 2.0.12", -] - [[package]] name = "brane-exe" version = "3.0.0" @@ -745,15 +821,14 @@ dependencies = [ "async-recursion", "async-trait", "base64 0.22.1", - "brane-ast 3.0.0", - "brane-shr 3.0.0", + "brane-ast", + "brane-shr", "console", "enum-debug", "futures", "humanlog", "lazy_static", "log", - "num-traits", "serde", "serde_json", "specifications 3.0.0", @@ -761,51 +836,28 @@ dependencies = [ "tokio", ] -[[package]] -name = "brane-exe" -version = "3.0.0" -source = "git+https://github.com/braneframework/brane#07430945248a2fd2dc2cc99b6c8477c47ae2f4d2" -dependencies = [ - "async-recursion", - "async-trait", - "base64 0.22.1", - "brane-ast 3.0.0 (git+https://github.com/braneframework/brane)", - "brane-shr 3.0.0 (git+https://github.com/braneframework/brane)", - "console", - "enum-debug", - "futures", - "lazy_static", - "log", - "num-traits", - "serde", - "serde_json", - "specifications 3.0.0 (git+https://github.com/braneframework/brane)", - "thiserror 2.0.12", - "tokio", -] - [[package]] name = "brane-job" version = "3.0.0" dependencies = [ "base64 0.22.1", "bollard", - "brane-ast 3.0.0", "brane-cfg", - "brane-exe 3.0.0", + "brane-chk", + "brane-exe", "brane-prx", - "brane-shr 3.0.0", + 
"brane-shr", "brane-tsk", "chrono", "clap", - "deliberation", "dotenvy", "enum-debug", "env_logger 0.11.7", - "error-trace", + "error-trace 3.0.0", "futures-util", "hyper 1.6.0", "log", + "policy-reasoner", "reqwest 0.12.14", "serde", "serde_json", @@ -824,8 +876,7 @@ version = "3.0.0" dependencies = [ "anyhow", "base64 0.22.1", - "brane-ast 3.0.0", - "brane-exe 3.0.0", + "brane-exe", "clap", "dotenvy", "env_logger 0.11.7", @@ -846,16 +897,16 @@ name = "brane-plr" version = "3.0.0" dependencies = [ "async-recursion", - "brane-ast 3.0.0", "brane-cfg", "brane-prx", "brane-tsk", "clap", "dotenvy", - "error-trace", + "error-trace 3.0.0", "humanlog", "log", "parking_lot", + "policy-reasoner", "rand 0.9.0", "reqwest 0.12.14", "serde_json", @@ -875,7 +926,7 @@ dependencies = [ "clap", "dotenvy", "env_logger 0.11.7", - "error-trace", + "error-trace 3.0.0", "log", "never-say-never", "reqwest 0.12.14", @@ -895,18 +946,16 @@ dependencies = [ name = "brane-reg" version = "3.0.0" dependencies = [ - "brane-ast 3.0.0", "brane-cfg", - "brane-exe 3.0.0", - "brane-shr 3.0.0", + "brane-shr", "brane-tsk", "clap", - "deliberation", "dotenvy", "enum-debug", "env_logger 0.11.7", - "error-trace", + "error-trace 3.0.0", "log", + "policy-reasoner", "reqwest 0.12.14", "rustls 0.21.12", "serde", @@ -946,31 +995,6 @@ dependencies = [ "url", ] -[[package]] -name = "brane-shr" -version = "3.0.0" -source = "git+https://github.com/braneframework/brane#07430945248a2fd2dc2cc99b6c8477c47ae2f4d2" -dependencies = [ - "async-compression", - "console", - "dialoguer", - "enum-debug", - "fs2", - "hex", - "humanlog", - "indicatif", - "log", - "regex", - "reqwest 0.12.14", - "sha2", - "specifications 3.0.0 (git+https://github.com/braneframework/brane)", - "thiserror 2.0.12", - "tokio", - "tokio-stream", - "tokio-tar", - "url", -] - [[package]] name = "brane-tsk" version = "3.0.0" @@ -979,10 +1003,10 @@ dependencies = [ "base64 0.22.1", "base64ct", "bollard", - "brane-ast 3.0.0", + "brane-ast", "brane-cfg", - "brane-exe 3.0.0", - "brane-shr 3.0.0", + "brane-exe", + "brane-shr", "chrono", "clap", "console", @@ -1333,17 +1357,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "575f75dfd25738df5b91b8e43e14d44bda14637a58fae779fd2b064f8bf3e010" [[package]] -name = "deliberation" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" +name = "deadpool" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ed5957ff93768adf7a65ab167a17835c3d2c3c50d084fe305174c112f468e2f" dependencies = [ - "brane-ast 3.0.0 (git+https://github.com/braneframework/brane)", - "brane-exe 3.0.0 (git+https://github.com/braneframework/brane)", - "enum-debug", - "log", - "serde", - "serde_json", - "uuid", + "deadpool-runtime", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-diesel" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "590573e9e29c5190a5ff782136f871e6e652e35d598a349888e028693601adf1" +dependencies = [ + "deadpool", + "deadpool-sync", + "diesel", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" +dependencies = [ + "tokio", +] + +[[package]] +name = "deadpool-sync" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"524bc3df0d57e98ecd022e21ba31166c2625e7d3e5bcc4510efaeeab4abcab04" +dependencies = [ + "deadpool-runtime", + "tracing", ] [[package]] @@ -1400,6 +1451,7 @@ version = "2.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "470eb10efc8646313634c99bb1593f402a6434cbd86e266770c6e39219adb86a" dependencies = [ + "chrono", "diesel_derives", "libsqlite3-sys", "time", @@ -1561,29 +1613,18 @@ dependencies = [ ] [[package]] -name = "eflint-json" +name = "eflint-haskell-reasoner" version = "0.1.0" -source = "git+https://gitlab.com/eflint/json-spec-rs.git?branch=incorrect-is-invariant#a77ae8c5050fcbeb36340ec86f353099d6c51182" +source = "git+https://github.com/BraneFramework/policy-reasoner?branch=lib-refactor#f96e4639467e1640d8d80631c1dc08ff2980d750" dependencies = [ - "enum-debug", + "error-trace 3.3.1", "serde", -] - -[[package]] -name = "eflint-to-json" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" -dependencies = [ - "async-recursion", - "console", - "futures-util", - "hex", - "hex-literal", - "indicatif", - "log", - "reqwest 0.12.14", "sha2", + "share", + "specifications 0.2.0", + "thiserror 2.0.12", "tokio", + "tracing", ] [[package]] @@ -1693,6 +1734,23 @@ name = "error-trace" version = "3.0.0" source = "git+https://github.com/Lut99/error-trace-rs?tag=v3.0.0#a026dc304b5bcc40bc68574f21459d2f1b6cad90" +[[package]] +name = "error-trace" +version = "3.3.0" +source = "git+https://github.com/Lut99/error-trace-rs?tag=v3.3.0#6db81b192df2323f9d675bf5c78f6e0d21f49b49" +dependencies = [ + "serde", +] + +[[package]] +name = "error-trace" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17beb87ca4ef7c5edd18148b494b437b00a019e6f22bf1e557c2f370d24c445" +dependencies = [ + "serde", +] + [[package]] name = "expanduser" version = "1.2.2" @@ -1721,6 +1779,21 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "file-logger" +version = "0.2.0" +source = "git+https://github.com/BraneFramework/policy-reasoner?branch=lib-refactor#f96e4639467e1640d8d80631c1dc08ff2980d750" +dependencies = [ + "chrono", + "enum-debug", + "serde", + "serde_json", + "specifications 0.2.0", + "thiserror 2.0.12", + "tokio", + "tracing", +] + [[package]] name = "filetime" version = "0.2.25" @@ -2192,16 +2265,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "http-serde" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f560b665ad9f1572cfcaf034f7fb84338a7ce945216d64a90fd81f046a3caee" -dependencies = [ - "http 0.2.12", - "serde", -] - [[package]] name = "httparse" version = "1.10.1" @@ -2637,6 +2700,12 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + [[package]] name = "is_terminal_polyfill" version = "1.70.1" @@ -2652,6 +2721,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.15" @@ -2752,6 +2830,21 @@ dependencies = [ "warp", ] +[[package]] +name = "jwk-auth" +version = "0.2.0" +source = 
"git+https://github.com/BraneFramework/policy-store#8afd377d3562b6159496691e4165304a41b38e17" +dependencies = [ + "base64ct", + "http 1.3.1", + "jsonwebtoken", + "serde_json", + "specifications 0.1.0", + "thiserror 2.0.12", + "time", + "tracing", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -2855,6 +2948,12 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "memchr" version = "2.7.4" @@ -2870,6 +2969,36 @@ dependencies = [ "autocfg", ] +[[package]] +name = "miette" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7" +dependencies = [ + "backtrace", + "backtrace-ext", + "cfg-if", + "miette-derive", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size", + "textwrap", + "unicode-width 0.1.14", +] + +[[package]] +name = "miette-derive" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "migrations_internals" version = "2.2.0" @@ -2959,6 +3088,14 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "names" +version = "0.1.0" +source = "git+https://github.com/Lut99/names-rs#6fbb94733dfe526f7e68e374846e3802bac88327" +dependencies = [ + "rand 0.8.5", +] + [[package]] name = "native-tls" version = "0.2.14" @@ -3091,6 +3228,16 @@ dependencies = [ "autocfg", ] +[[package]] +name = "num_cpus" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" +dependencies = [ + "hermit-abi", + "libc", +] + [[package]] name = "num_enum" version = "0.6.1" @@ -3213,6 +3360,12 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" name = "overview" version = "3.0.0" +[[package]] +name = "owo-colors" +version = "4.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e" + [[package]] name = "parking_lot" version = "0.12.3" @@ -3303,16 +3456,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] -name = "policy" +name = "policy-reasoner" +version = "1.0.0" +source = "git+https://github.com/BraneFramework/policy-reasoner?branch=lib-refactor#f96e4639467e1640d8d80631c1dc08ff2980d750" +dependencies = [ + "eflint-haskell-reasoner", + "file-logger", + "miette", + "share", + "specifications 0.2.0", + "workflow", +] + +[[package]] +name = "policy-store" version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" +source = "git+https://github.com/BraneFramework/policy-store#8afd377d3562b6159496691e4165304a41b38e17" dependencies = [ - "async-trait", - "chrono", - "log", - "serde", - "serde_json", - "warp", + "axum-server", + "axum-server-spec", + "jwk-auth", + "specifications 0.1.0", + "sqlite-database", ] [[package]] @@ -3346,10 +3511,10 @@ 
dependencies = [ ] [[package]] -name = "prettytable-rs" +name = "prettytable" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eea25e07510aa6ab6547308ebe3c036016d162b8da920dbb079e3ba8acf3d95a" +checksum = "46480520d1b77c9a3482d39939fcf96831537a250ec62d4fd8fbdf8e0302e781" dependencies = [ "csv", "encode_unicode", @@ -3359,17 +3524,6 @@ dependencies = [ "unicode-width 0.1.14", ] -[[package]] -name = "problem_details" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09c30ce00f18c85c0a9cfaecb51d7d2bce8ca36f2d7850b009b3ea166f2dca9d" -dependencies = [ - "http 0.2.12", - "http-serde", - "serde", -] - [[package]] name = "proc-macro-crate" version = "1.3.1" @@ -3389,6 +3543,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "prost" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +dependencies = [ + "bytes", + "prost-derive 0.12.6", +] + [[package]] name = "prost" version = "0.13.5" @@ -3396,7 +3560,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" dependencies = [ "bytes", - "prost-derive", + "prost-derive 0.13.5", +] + +[[package]] +name = "prost-derive" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +dependencies = [ + "anyhow", + "itertools 0.11.0", + "proc-macro2", + "quote", + "syn 2.0.100", ] [[package]] @@ -3406,7 +3583,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools", + "itertools 0.11.0", "proc-macro2", "quote", "syn 2.0.100", @@ -3562,26 +3739,6 @@ dependencies = [ "rand_core 0.6.4", ] -[[package]] -name = "reasonerconn" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" -dependencies = [ - "anyhow", - "async-trait", - "audit-logger", - "eflint-json", - "enum-debug", - "log", - "policy", - "serde", - "serde_json", - "state-resolver", - "tokio", - "transform", - "workflow", -] - [[package]] name = "redox_syscall" version = "0.1.57" @@ -3735,7 +3892,6 @@ dependencies = [ "base64 0.22.1", "bytes", "encoding_rs", - "futures-channel", "futures-core", "futures-util", "h2 0.4.8", @@ -4009,7 +4165,7 @@ dependencies = [ "dashmap 5.5.3", "futures", "histogram", - "itertools", + "itertools 0.11.0", "lz4_flex", "num_enum", "rand 0.8.5", @@ -4128,6 +4284,16 @@ dependencies = [ "serde_json", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a" +dependencies = [ + "itoa", + "serde", +] + [[package]] name = "serde_repr" version = "0.1.20" @@ -4247,6 +4413,16 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "share" +version = "0.1.0" +source = "git+https://github.com/BraneFramework/policy-reasoner?branch=lib-refactor#f96e4639467e1640d8d80631c1dc08ff2980d750" +dependencies = [ + "miette", + "tempfile", + "tokio", +] + [[package]] name = "shell-words" version = "1.1.0" @@ -4325,7 +4501,7 @@ dependencies = [ [[package]] name = "socksx" version = "2.0.0" -source = 
"git+https://github.com/braneframework/socksx?tag=v2.0.0#9ad245649bb8a3e155aea95313cbcb9d7850ff6d" +source = "git+https://github.com/epi-project/socksx?tag=v2.0.0#9ad245649bb8a3e155aea95313cbcb9d7850ff6d" dependencies = [ "anyhow", "async-trait", @@ -4335,7 +4511,7 @@ dependencies = [ "env_logger 0.10.2", "futures", "human-panic 1.2.3", - "itertools", + "itertools 0.11.0", "libc", "log", "nix 0.27.1", @@ -4349,38 +4525,27 @@ dependencies = [ [[package]] name = "specifications" -version = "3.0.0" +version = "0.1.0" +source = "git+https://github.com/BraneFramework/policy-store#8afd377d3562b6159496691e4165304a41b38e17" dependencies = [ - "anyhow", - "async-trait", - "base64 0.22.1", - "base64ct", "chrono", - "const_format", - "enum-debug", - "futures", - "jsonwebtoken", - "log", - "parking_lot", - "prost", - "reqwest 0.12.14", - "semver", + "http 1.3.1", "serde", - "serde_json", - "serde_test", - "serde_with", - "serde_yml", - "strum 0.27.1", - "strum_macros 0.27.1", - "thiserror 2.0.12", - "tonic", - "uuid", +] + +[[package]] +name = "specifications" +version = "0.2.0" +source = "git+https://github.com/BraneFramework/policy-reasoner?branch=lib-refactor#f96e4639467e1640d8d80631c1dc08ff2980d750" +dependencies = [ + "paste", + "serde", + "share", ] [[package]] name = "specifications" version = "3.0.0" -source = "git+https://github.com/braneframework/brane#07430945248a2fd2dc2cc99b6c8477c47ae2f4d2" dependencies = [ "anyhow", "async-trait", @@ -4391,13 +4556,19 @@ dependencies = [ "enum-debug", "futures", "jsonwebtoken", + "lazy_static", "log", + "num-traits", "parking_lot", - "prost", + "policy-reasoner", + "policy-store", + "prost 0.13.5", + "rand 0.9.0", "reqwest 0.12.14", "semver", "serde", "serde_json", + "serde_json_any_key", "serde_test", "serde_with", "serde_yml", @@ -4415,29 +4586,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] -name = "srv" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" +name = "sqlite-database" +version = "0.2.0" +source = "git+https://github.com/BraneFramework/policy-store#8afd377d3562b6159496691e4165304a41b38e17" dependencies = [ - "audit-logger", - "auth-resolver", - "brane-ast 3.0.0 (git+https://github.com/braneframework/brane)", - "brane-exe 3.0.0 (git+https://github.com/braneframework/brane)", "chrono", - "deliberation", - "error-trace", - "http 1.3.1", - "log", - "policy", - "problem_details", - "reasonerconn", + "deadpool", + "deadpool-diesel", + "diesel", + "diesel_migrations", "serde", "serde_json", - "state-resolver", + "specifications 0.1.0", + "thiserror 2.0.12", "tokio", - "uuid", - "warp", - "workflow", + "tracing", ] [[package]] @@ -4446,16 +4609,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "state-resolver" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" -dependencies = [ - "async-trait", - "serde", - "workflow", -] - [[package]] name = "static_assertions" version = "1.1.0" @@ -4515,6 +4668,27 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "supports-color" +version = "3.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" +dependencies = [ + "is_ci", +] + +[[package]] +name = "supports-hyperlinks" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b" + +[[package]] +name = "supports-unicode" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" + [[package]] name = "syn" version = "1.0.109" @@ -4650,6 +4824,26 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "terminal_size" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" +dependencies = [ + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "textwrap" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" +dependencies = [ + "unicode-linebreak", + "unicode-width 0.2.0", +] + [[package]] name = "thiserror" version = "1.0.69" @@ -4919,7 +5113,7 @@ checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", - "axum", + "axum 0.7.9", "base64 0.22.1", "bytes", "h2 0.4.8", @@ -4931,7 +5125,7 @@ dependencies = [ "hyper-util", "percent-encoding", "pin-project", - "prost", + "prost 0.13.5", "socket2", "tokio", "tokio-stream", @@ -4974,6 +5168,7 @@ dependencies = [ "tokio", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -5050,11 +5245,6 @@ dependencies = [ "tracing-log", ] -[[package]] -name = "transform" -version = "0.2.0" -source = "git+https://github.com/Lut99/transform-rs?tag=v0.2.0#ea708962d9a770c1f89c3bce0b9309dfe48656a6" - [[package]] name = "try-lock" version = "0.2.5" @@ -5108,6 +5298,12 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + [[package]] name = "unicode-segmentation" version = "1.12.0" @@ -5708,19 +5904,11 @@ dependencies = [ [[package]] name = "workflow" -version = "0.1.0" -source = "git+https://github.com/braneframework/policy-reasoner#6b54ba81ea00e3096287a1873126e45b515f1a7f" +version = "0.2.0" +source = "git+https://github.com/BraneFramework/policy-reasoner?branch=lib-refactor#f96e4639467e1640d8d80631c1dc08ff2980d750" dependencies = [ - "brane-ast 3.0.0 (git+https://github.com/braneframework/brane)", - "brane-exe 3.0.0 (git+https://github.com/braneframework/brane)", - "eflint-json", "enum-debug", - "log", - "num-traits", - "rand 0.9.0", "serde", - "specifications 3.0.0 (git+https://github.com/braneframework/brane)", - "transform", ] [[package]] @@ -5772,8 +5960,8 @@ dependencies = [ "brane-cfg", "brane-cli", "brane-ctl", - "brane-dsl 3.0.0", - "brane-shr 3.0.0", + "brane-dsl", + "brane-shr", "brane-tsk", "clap", "clap_complete", diff --git a/Cargo.toml b/Cargo.toml index 03f306b4..4a3ba221 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,7 +25,8 @@ members = [ # These crates implement services for worker nodes "brane-job", "brane-reg", - "brane-let", + 
"brane-chk", + "brane-let", # Not a service, container-local binary # These crates implement services that occur on any type of node "brane-prx", diff --git a/Dockerfile.dev b/Dockerfile.dev deleted file mode 100644 index 54f9d598..00000000 --- a/Dockerfile.dev +++ /dev/null @@ -1,152 +0,0 @@ -# DOCKERFILE.dev for BRANE -# by Tim Müller and Onno Valkering -# -# Contains the Dockerfile for the various Brane instance images. -# -# This version builds the development images, which prefer build speed and -# debugging over fast executables by simply copying pre-build executables from -# the project 'target' directory. -# -# This results in much faster build times when building repeatedly, due to the -# build cache being re-used, and (on virtualized Docker environments) much -# faster disk I/O times. -# -# For a release version, see Dockerfile.rls. -# - - -##### BASE IMAGE ##### -# This image defines the base image for all Brane service images. -# Note: we don't do 20.04 because the skopeo alternative link has died -# Note: we'd like to go to 22.04, but for now this is in conflict with OpenSSL -# Note: actually we're doing 22.04 but using 21.10 repos -FROM ubuntu:22.04 AS brane-base -LABEL org.opencontainers.image.source https://github.com/braneframework/brane - -# Add the log directory -RUN mkdir -p /logs/profile - -# Add an ubuntu 21.10 source for libssl1.1 (insecure, but it's the dev image anyway) -RUN echo "deb http://old-releases.ubuntu.com/ubuntu impish-security main" >> /etc/apt/sources.list - -# Install libssl (the Rust crate depends on it) -RUN apt-get update && apt-get install -y \ - libssl1.1 \ - && rm -rf /var/lib/apt/lists/* - -# If ever run, run a shell -ENTRYPOINT [ "/bin/bash" ] - - - - - -##### BRANE-PRX ##### -# This image contains the Brane proxy service. -FROM brane-base AS brane-prx - -# Define the architecture argument -ARG ARCH - -# Copy `brane-prx` from build stage -COPY ./.container-bins/$ARCH/brane-prx /brane-prx -RUN chmod +x /brane-prx - -# Run the compiled executable as base -ENTRYPOINT [ "/brane-prx", "--debug" ] - - - - - -##### BRANE-API ##### -# This image contains the Brane API service. -FROM brane-base AS brane-api - -# Define the architecture argument -ARG ARCH - -# Install additional runtime dependencies specific for brane-api -RUN apt-get update && apt-get install -y \ - gnupg2 \ - wget \ - && rm -rf /var/lib/apt/lists/* - -# Copy `brane-api` from build stage -COPY ./.container-bins/$ARCH/brane-api /brane-api -RUN chmod +x /brane-api - -# Run the compiled executable as base -ENTRYPOINT [ "/brane-api", "--debug" ] - - - - - -##### BRANE-DRV ##### -# This image contains the Brane driver service. -FROM brane-base AS brane-drv - -# Define the architecture argument -ARG ARCH - -# Copy `brane-drv` from build stage -COPY ./.container-bins/$ARCH/brane-drv /brane-drv -RUN chmod +x /brane-drv - -# Run the compiled executable as base -ENTRYPOINT [ "/brane-drv", "--debug" ] - - - - - -##### BRANE-PLR ##### -# This image contains the Brane planner service. -FROM brane-base AS brane-plr - -# Define the architecture argument -ARG ARCH - -# Copy `brane-plr` from build stage -COPY ./.container-bins/$ARCH/brane-plr /brane-plr -RUN chmod +x /brane-plr - -# Run the compiled executable as base -ENTRYPOINT [ "/brane-plr", "--debug" ] - - - - - -##### BRANE-JOB ##### -# This image contains the Brane job service. 
-FROM brane-base AS brane-job
-
-# Define the architecture argument
-ARG ARCH
-
-# Copy `brane-job` from build stage
-COPY ./.container-bins/$ARCH/brane-job /brane-job
-RUN chmod +x /brane-job
-
-# Run the compiled executable as base
-ENTRYPOINT [ "/brane-job", "--debug" ]
-
-
-
-
-
-##### BRANE-REG #####
-# This image contains the Brane registry service.
-FROM brane-base AS brane-reg
-
-# Define the architecture argument
-ARG ARCH
-
-# Copy `brane-job` from build stage
-COPY ./.container-bins/$ARCH/brane-reg /brane-reg
-RUN chmod +x /brane-reg
-
-# Run the compiled executable as base
-ENTRYPOINT [ "/brane-reg", "--debug" ]
diff --git a/Dockerfile.let b/Dockerfile.let
new file mode 100644
index 00000000..3e00cddd
--- /dev/null
+++ b/Dockerfile.let
@@ -0,0 +1,55 @@
+# DOCKERFILE.let for BRANE
+# by Tim Müller
+#
+# Contains the Dockerfile for building a `branelet` binary in a container.
+#
+# This is necessary when running a non-released Brane (i.e., no precompiled binaries) on a
+# system whose GLIBC is newer than the one in the container.
+#
+# The easiest way to use it is through `make brane-let-docker`.
+#
+
+
+# NOTE: Ensure this is the same as used by `brane-cli`!
+FROM ubuntu:20.04
+LABEL org.opencontainers.image.source=https://github.com/epi-project/brane
+
+# Define some build args
+ARG USERID=1000
+ARG GROUPID=1000
+
+# Setup a user mirroring the main one (only create the group if the GID is still free;
+# later steps refer to the group by its numeric GID, which works either way)
+RUN if ! getent group $GROUPID > /dev/null; then addgroup --gid $GROUPID brane; fi
+RUN adduser --uid $USERID --gid $GROUPID --gecos "Brane" --disabled-password brane
+
+# Install build dependencies
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install -y \
+    gcc g++ \
+    make cmake \
+    perl curl \
+ && rm -rf /var/lib/apt/lists/*
+
+# Prepare the build directory while we're root
+RUN mkdir -p /build/target \
+ && chown -R brane:$GROUPID /build
+
+# Install rust
+USER brane
+RUN bash -c "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y" \
+ && echo ". /home/brane/.cargo/env" >> /home/brane/.profile
+
+# Copy over relevant crates & other files
+COPY --chown=$USERID:$GROUPID . /build
+
+# Build the binary
+WORKDIR /build
+RUN --mount=type=cache,id=cargoidx,uid=$USERID,target=/home/brane/.cargo/registry \
+    --mount=type=cache,id=braneletcache,uid=$USERID,target=/build/target \
+    . /home/brane/.profile \
+ && cargo build \
+    --release \
+    --package brane-let \
+ && cp ./target/release/branelet /home/brane/branelet
+
+# Done
+ENTRYPOINT ["cp", "/home/brane/branelet", "/output/branelet"]
diff --git a/Dockerfile.rls b/Dockerfile.rls
index fca760c5..ee0cff66 100644
--- a/Dockerfile.rls
+++ b/Dockerfile.rls
@@ -10,8 +10,16 @@
 ##### BUILD STAGE #####
 ### This file will act as the bottom for both builder images
-FROM rust:1 AS build-common
-LABEL org.opencontainers.image.source https://github.com/braneframework/brane
+FROM rust:1 AS build-brane
+LABEL org.opencontainers.image.source=https://github.com/epi-project/brane
+
+# Define some build args
+ARG USERID=1000
+ARG GROUPID=1000
+
+# Setup a user mirroring the main one
+RUN addgroup --gid $GROUPID brane
+RUN adduser --uid $USERID --gid $GROUPID --gecos "Brane" --disabled-password brane
 
 # Install build dependencies (that are not in the rust image already)
 RUN apt-get update && apt-get install -y \
@@ -21,60 +29,30 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy over relevant crates & other files
-RUN mkdir /build
-COPY . /build
-
-
-
-### This file does the Brane services
-FROM build-common AS build-brane
-LABEL org.opencontainers.image.source https://github.com/braneframework/brane
+RUN mkdir /build && chown -R brane:brane /build
+COPY --chown=brane:brane . /build
 
 # Build optimized binaries
+USER brane
 WORKDIR /build
-RUN --mount=type=cache,id=cargoidx,target=/usr/local/cargo/registry \
-    --mount=type=cache,id=branecache,target=/build/target \
+RUN --mount=type=cache,id=cargoidx,uid=$USERID,target=/usr/local/cargo/registry \
+    --mount=type=cache,id=branecache,uid=$USERID,target=/build/target \
     cargo build \
     --release \
     --package brane-api \
+    --package brane-chk \
     --package brane-drv \
     --package brane-job \
    --package brane-prx \
     --package brane-plr \
     --package brane-reg \
-    && cp ./target/release/brane-api /brane-api \
-    && cp ./target/release/brane-drv /brane-drv \
-    && cp ./target/release/brane-job /brane-job \
-    && cp ./target/release/brane-prx /brane-prx \
-    && cp ./target/release/brane-plr /brane-plr \
-    && cp ./target/release/brane-reg /brane-reg
-
-# If ever run, run a shell
-WORKDIR /
-ENTRYPOINT [ "/bin/bash" ]
-
-
-
-### This file does the policy reasoner service
-FROM build-common AS build-policy-reasoner
-LABEL org.opencontainers.image.source https://github.com/braneframework/brane
-
-# Fetch the reasoner code next
-WORKDIR /
-ARG REASONER=eflint
-ARG REASONER_BRANCH=main
-ADD "https://github.com/braneframework/policy-reasoner/zipball/${REASONER_BRANCH}/" /policy-reasoner.zip
-RUN unzip /policy-reasoner.zip && mv /BraneFramework-policy-reasoner* /policy-reasoner
-
-# Touch a `policy.db` into action to avoid the policy reasoner's `build.rs` building it for us (unnecessary)
-RUN mkdir -p /policy-reasoner/data && touch /policy-reasoner/data/policy.db
-
-# Compile it!
-WORKDIR /policy-reasoner
-RUN --mount=type=cache,id=cargoidx,target=/usr/local/cargo/registry \
-    --mount=type=cache,id=reasonercache,target=/policy-reasoner/target \
-    cargo build --release --bin $REASONER --features brane-api-resolver \
-    && cp ./target/release/$REASONER /brane-chk
+    && cp ./target/release/brane-api /home/brane/brane-api \
+    && cp ./target/release/brane-chk /home/brane/brane-chk \
+    && cp ./target/release/brane-drv /home/brane/brane-drv \
+    && cp ./target/release/brane-job /home/brane/brane-job \
+    && cp ./target/release/brane-prx /home/brane/brane-prx \
+    && cp ./target/release/brane-plr /home/brane/brane-plr \
+    && cp ./target/release/brane-reg /home/brane/brane-reg
 
 # If ever run, run a shell
 WORKDIR /
@@ -82,26 +60,29 @@ ENTRYPOINT [ "/bin/bash" ]
 
-### This target does the eflint-server binary
-FROM ubuntu:22.04 AS build-eflint-server
+### This target does the eflint-repl binary
+FROM haskell:9.2.8 AS build-eflint-repl
 
 # Define build args
-ARG ARCH=amd64
+ARG USERID=1000
+ARG GROUPID=1000
+ARG EFLINT_COMMIT=87d2be70c400b6f252fa28b51a4b5afbc947117c
 
-# Install deps
-RUN apt-get update && apt-get install -y \
-    git wget \
-    && rm -rf /var/lib/apt/lists/*
+# Setup a user mirroring the main one
+RUN addgroup --gid $GROUPID brane
+RUN adduser --uid $USERID --gid $GROUPID --gecos "Brane" --disabled-password brane
 
-# Install go
-RUN wget https://go.dev/dl/go1.22.1.linux-$ARCH.tar.gz -O - | tar -xvz
+# Add a haskell GHC alias for 9.2
+RUN ln -s /opt/ghc/9.2.8/bin/ghc /opt/ghc/9.2.8/bin/ghc-9.2
 
 # Fetch the repo & compile it
-RUN PATH="$PATH:/go/bin" \
-    && git clone https://github.com/epi-project/eflint-server-go /eflint-server-go \
-    && cd /eflint-server-go/cmd/eflint-server \
-    && go build . \
-    && mv ./eflint-server /eflint-server
+USER brane
+ADD --chown=brane:brane https://gitlab.com/eflint/haskell-implementation/-/archive/$EFLINT_COMMIT/haskell-implementation-$EFLINT_COMMIT.zip /home/brane/haskell-implementation.zip
+RUN cd /home/brane \
+ && unzip haskell-implementation.zip \
+ && cd haskell-implementation-$EFLINT_COMMIT \
+ && cabal update \
+ && cabal install eflint-repl
 
@@ -110,19 +91,28 @@ RUN PATH="$PATH:/go/bin" \
 
 ##### BASE IMAGE #####
 # This image defines the base image for all Brane service images.
 FROM ubuntu:22.04 AS brane-base
-LABEL org.opencontainers.image.source https://github.com/braneframework/brane
+LABEL org.opencontainers.image.source=https://github.com/epi-project/brane
+
+# Define some build args
+ARG USERID=1000
+ARG GROUPID=1000
+
+# Setup a user mirroring the main one
+RUN addgroup --gid $GROUPID brane
+RUN adduser --uid $USERID --gid $GROUPID --gecos "Brane" --disabled-password brane
 
 # Add the log directory
-RUN mkdir -p /logs/profile
+RUN mkdir -p /logs/profile && chown -R brane:brane /logs/profile
+USER brane
 
-# Add an ubuntu 21.10 source for libssl1.1 (insecure, but it's the dev image anyway)
-# Can't get around that, even in release, since libssl1.1 is only available in old repos
-RUN echo "deb http://old-releases.ubuntu.com/ubuntu impish-security main" >> /etc/apt/sources.list
+# # Add an ubuntu 21.10 source for libssl1.1 (insecure, but it's the dev image anyway)
+# # Can't get around that, even in release, since libssl1.1 is only available in old repos
+# RUN echo "deb http://old-releases.ubuntu.com/ubuntu impish-security main" >> /etc/apt/sources.list
 
-# Install libssl (the Rust crate depends on it)
-RUN apt-get update && apt-get install -y \
-    libssl1.1 \
-    && rm -rf /var/lib/apt/lists/*
+# # Install libssl (the Rust crate depends on it)
+# RUN apt-get update && apt-get install -y \
+#     libssl1.1 \
+#     && rm -rf /var/lib/apt/lists/*
 
 # If ever run, run a shell
 ENTRYPOINT [ "/bin/bash" ]
@@ -136,10 +126,10 @@ ENTRYPOINT [ "/bin/bash" ]
 FROM brane-base AS brane-prx
 
 # Copy `brane-prx` from build stage
-COPY --from=build-brane /brane-prx /brane-prx
+COPY --from=build-brane --chown=brane:brane /home/brane/brane-prx /brane-prx
 
 # Run the compiled executable as base
-ENTRYPOINT [ "./brane-prx" ]
+ENTRYPOINT [ "/brane-prx" ]
 
@@ -150,15 +140,17 @@ ENTRYPOINT [ "./brane-prx" ]
 FROM brane-base AS brane-api
 
 # Install additional runtime dependencies specific for brane-api
+USER root
 RUN apt-get update && apt-get install -y \
     gnupg2 \
     wget \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy `brane-api` from build stage
-COPY --from=build-brane /brane-api /brane-api
+COPY --from=build-brane --chown=brane:brane /home/brane/brane-api /brane-api
 
 # Run the compiled executable as base
+USER brane
 ENTRYPOINT [ "/brane-api" ]
 
@@ -170,10 +162,10 @@ ENTRYPOINT [ "/brane-api" ]
 FROM brane-base AS brane-drv
 
 # Copy `brane-drv` from build stage
-COPY --from=build-brane /brane-drv /brane-drv
+COPY --from=build-brane --chown=brane:brane /home/brane/brane-drv /brane-drv
 
 # Run the compiled executable as base
-ENTRYPOINT [ "./brane-drv" ]
+ENTRYPOINT [ "/brane-drv" ]
 
@@ -184,10 +176,10 @@ ENTRYPOINT [ "./brane-drv" ]
 FROM brane-base AS brane-plr
 
 # Copy `brane-plr` from build stage
-COPY --from=build-brane /brane-plr /brane-plr
+COPY --from=build-brane --chown=brane:brane /home/brane/brane-plr /brane-plr
 
 # Run the compiled executable as base
-ENTRYPOINT [ "./brane-plr" ]
+ENTRYPOINT [ "/brane-plr" ]
 
@@ -197,11 +189,19 @@ ENTRYPOINT [ "./brane-plr" ]
 # This image contains the Brane job service.
service. FROM brane-base AS brane-job +# ARG SOCK_GROUPID=1000 + +# Add the brane user to the group we need to access the Docker socket +# Very unfortunate, this. Don't know how else to mount the socket as non-root :/ +USER root +RUN adduser brane root + # Copy `brane-job` from build stage -COPY --from=build-brane /brane-job /brane-job +COPY --from=build-brane --chown=brane:brane /home/brane/brane-job /brane-job # Run the compiled executable as base -ENTRYPOINT [ "./brane-job" ] +# USER brane +ENTRYPOINT [ "/brane-job" ] @@ -212,10 +212,10 @@ ENTRYPOINT [ "./brane-job" ] FROM brane-base AS brane-reg # Copy `brane-reg` from build stage -COPY --from=build-brane /brane-reg /brane-reg +COPY --from=build-brane --chown=brane:brane /home/brane/brane-reg /brane-reg # Run the compiled executable as base -ENTRYPOINT [ "./brane-reg" ] +ENTRYPOINT [ "/brane-reg" ] @@ -223,31 +223,29 @@ ENTRYPOINT [ "./brane-reg" ] ##### BRANE-CHK ##### # This image contains the policy reasoner / checker! -FROM ubuntu:22.04 AS brane-chk +FROM brane-base AS brane-chk -# Install deps +# Define build args +ENV POLICY_FILE="./main.eflint" + +# Install more deps +USER root RUN apt-get update && apt-get install -y \ sqlite3 \ && rm -rf /var/lib/apt/lists/* -# Generate a start script -RUN printf '#!/bin/bash\n# Startup script for the policy reasoner container\n\n# Spawn the eFLINT reasoner itself\n/eflint-server 2>&1 &\n\n# Launch the policy reasoner binary\n/brane-chk $@\n\n' > /startup.sh \ - && chmod +x /startup.sh - -# Install the eFLINT JSON server -# ADD https://github.com/Olaf-Erkemeij/eflint-server/raw/bd3997df89441f13cbc82bd114223646df41540d/eflint-server /eflint-server -# RUN chmod +x /eflint-server - -# Copy some config from the build stage -COPY --from=build-policy-reasoner /policy-reasoner/examples/config/jwt_resolver.yaml /examples/config/jwt_resolver.yaml - # Copy `policy-reasoner` from build stage -COPY --from=build-policy-reasoner /brane-chk /brane-chk +COPY --from=build-brane --chown=brane:brane /home/brane/brane-chk /brane-chk RUN chmod +x /brane-chk -# Copy `eflint-server` from build stage -COPY --from=build-eflint-server /eflint-server /eflint-server +# Copy `eflint-repl` from build stage +COPY --from=build-eflint-repl --chown=brane:brane /home/brane/.cabal/bin/eflint-repl /home/brane/eflint-repl + +# Copy the base policy +COPY --chown=brane:brane ./brane-chk/policy /home/brane/policy # Run the compiler executable as base -WORKDIR / -ENTRYPOINT [ "/startup.sh" ] +USER brane +# NOTE: Working from this dir to make the exposed `POLICY_FILE` env a little sensible +WORKDIR /home/brane/policy +ENTRYPOINT [ "/brane-chk", "--backend-cmd", "/home/brane/eflint-repl" ] diff --git a/Makefile b/Makefile index 6a25084b..b7e0dc3f 100644 --- a/Makefile +++ b/Makefile @@ -2,28 +2,56 @@ # This file is used to compile various parts of the Brane infrastructure and tooling # # + CENTRAL_SERVICES := brane-api brane-drv brane-plr WORKER_SERVICES := brane-job brane-reg brane-chk SHARED_SERVICES := brane-prx -BINARY_TARGETS := brane-ctl brane-cli brane-let +BINARY_TARGETS := brane-ctl brane-cli # brane-let <-- True but we need special treatment (always build as Linux, never as Darwin) BUILD_DIR := target -IMAGE_DIR := $(BUILD_DIR)/debug -BIN_DIR := $(BUILD_DIR)/debug +IMAGE_DIR := $(BUILD_DIR)/release +BIN_DIR := $(BUILD_DIR)/release WORKSPACE_MEMBERS := $(sort $(CENTRAL_SERVICES) $(WORKER_SERVICES) $(SHARED_SERVICES)) BUILDX_ARGS := build -CARGO_BUILD_ARGS := -IMAGE_DOCKER_FILE := ./Dockerfile.dev +CARGO_BUILD_ARGS := --release 
+IMAGE_DOCKER_FILE := ./Dockerfile.rls
+
+# Find the architecture of this machine
+ifndef RUST_ARCH
+    SARCH := $(shell uname -m)
+    ifeq ($(SARCH),amd64)
+        RUST_ARCH := x86_64
+    else ifeq ($(SARCH),x86_64)
+        RUST_ARCH := x86_64
+    else ifeq ($(SARCH),x86-64)
+        RUST_ARCH := x86_64
+    else ifeq ($(SARCH),aarch64)
+        RUST_ARCH := aarch64
+    else ifeq ($(SARCH),arm64)
+        RUST_ARCH := aarch64
+    else
+        RUST_ARCH := UNKNOWN
+    endif
+endif
+
+# Find the group ID of the Docker socket
+# This is necessary to give `brane-job` the right permissions for accessing it. I know.
+SOCK_GID := $(shell id -g)
+ifeq ($(shell uname),Darwin)
+    SOCK_GID := $(shell stat -f '%g' /var/run/docker.sock)
+else ifeq ($(shell uname),Linux)
+    SOCK_GID := $(shell id -g "$(shell stat -c '%G' /var/run/docker.sock)")
+endif

 # The binaries we can build in either debug or release mode
-ifeq ($(PROFILE),release)
-    CARGO_BUILD_ARGS += --release
-    IMAGE_DOCKER_FILE := ./Dockerfile.rls
-    IMAGE_DIR := $(BUILD_DIR)/release
-    BIN_DIR := $(BUILD_DIR)/release
+ifeq ($(PROFILE),debug)
+    CARGO_BUILD_ARGS := $(filter-out --release,$(CARGO_BUILD_ARGS))
+    IMAGE_DOCKER_FILE := ./Dockerfile.dev
+    IMAGE_DIR := $(BUILD_DIR)/debug
+    BIN_DIR := $(BUILD_DIR)/debug
 endif

 # Sometimes docker buildx can take a cached version while there are actually some changes. With
@@ -59,14 +87,28 @@ central-images: $(CENTRAL_SERVICES) $(SHARED_SERVICES)
 .PHONY: $(WORKSPACE_MEMBERS)
 $(WORKSPACE_MEMBERS): $(IMAGE_DIR)
 	@echo "Building $@"
-	docker buildx $(BUILDX_ARGS) --output type="docker,dest=$(IMAGE_DIR)/$@.tar" --file $(IMAGE_DOCKER_FILE) --target $@ .
+	docker buildx $(BUILDX_ARGS) --output type="docker,dest=$(IMAGE_DIR)/$@.tar" --file $(IMAGE_DOCKER_FILE) --target $@ --build-arg "USERID=$(shell id -u)" --build-arg "SOCK_GROUPID=$(SOCK_GID)" .

 # Compilation of binaries
 .PHONY: $(BINARY_TARGETS)
+brane-let: $(BIN_DIR)
+	@echo "Building $@"
+	cargo build $(CARGO_BUILD_ARGS) --target $(RUST_ARCH)-unknown-linux-musl --package $@
 $(BINARY_TARGETS): $(BIN_DIR)
 	@echo "Building $@"
 	cargo build $(CARGO_BUILD_ARGS) --package $@

+# Compilation of branelet
+.PHONY: brane-let-builder
+brane-let-builder:
+	@echo "Building brane-let builder container"
+	docker buildx build --load -t brane-let-builder:latest -f Dockerfile.let --build-arg "USERID=$(shell id -u)" --build-arg "GROUPID=$(shell id -g)" .
+
+.PHONY: brane-let-docker
+brane-let-docker: brane-let-builder $(BIN_DIR)
+	@echo "Building brane-let in a Docker container"
+	docker run -it --rm -v "$(shell pwd)/target/release:/output" brane-let-builder:latest
+
 # Directory creation
 # It is important that we flag this directory as a CACHETAG.DIR. Various backup solutions for example will otherwise backup
 # the directory. This might seem nice, but these artifacts can be very large in size and should be reproducible anyway.
diff --git a/brane-api/src/data.rs b/brane-api/src/data.rs
index d26cc179..60917c94 100644
--- a/brane-api/src/data.rs
+++ b/brane-api/src/data.rs
@@ -4,7 +4,7 @@
 // Created:
 // 26 Sep 2022, 17:20:55
 // Last edited:
-// 07 Jun 2023, 16:29:39
+// 19 Feb 2025, 17:29:19
 // Auto updated?
 // Yes
 //
@@ -82,7 +82,7 @@ pub async fn list(context: Context) -> Result {
     let mut datasets: HashMap = HashMap::new();
     for (loc_name, loc) in infra {
         // Run a GET-request on `/data/info` to fetch all datasets in this domain
-        let address: String = format!("{}/data/info", loc.registry);
+        let address: String = format!("http://{}/data/info", loc.registry);

         let res: reqwest::Response = match context.proxy.get(&address, Some(NewPathRequestTlsOptions { location: loc_name.clone(), use_client_auth: false })).await {
             Ok(res) => match res {
@@ -190,7 +190,7 @@ pub async fn get(name: String, context: Context) -> Result = None;
     for (loc_name, loc) in infra {
         // Run a GET-request on `/data` to fetch the specific dataset we're asked for
-        let address: String = format!("{}/data/info/{}", loc.registry, name);
+        let address: String = format!("http://{}/data/info/{}", loc.registry, name);

         let res: reqwest::Response = match context.proxy.get(&address, Some(NewPathRequestTlsOptions { location: loc_name.clone(), use_client_auth: false })).await {
             Ok(res) => match res {
diff --git a/brane-api/src/infra.rs b/brane-api/src/infra.rs
index 04b33dde..f299ee61 100644
--- a/brane-api/src/infra.rs
+++ b/brane-api/src/infra.rs
@@ -4,7 +4,7 @@
 // Created:
 // 02 Nov 2022, 16:21:33
 // Last edited:
-// 13 Jul 2023, 13:58:57
+// 19 Feb 2025, 17:29:27
 // Auto updated?
 // Yes
 //
@@ -194,7 +194,7 @@ pub async fn get_capabilities(loc: String, context: Context) -> Result match res { Ok(res) => res,
diff --git a/brane-api/src/packages.rs b/brane-api/src/packages.rs
index 2973aba0..e3f1645f 100644
--- a/brane-api/src/packages.rs
+++ b/brane-api/src/packages.rs
@@ -4,7 +4,7 @@
 // Created:
 // 17 Oct 2022, 15:18:32
 // Last edited:
-// 08 Feb 2024, 16:16:22
+// 29 Apr 2025, 11:37:35
 // Auto updated?
 // Yes
 //
diff --git a/brane-ast/src/ast_unresolved.rs b/brane-ast/src/ast_unresolved.rs
index 6e77e2ef..fec28e97 100644
--- a/brane-ast/src/ast_unresolved.rs
+++ b/brane-ast/src/ast_unresolved.rs
@@ -4,7 +4,7 @@
 // Created:
 // 03 Sep 2022, 12:31:20
 // Last edited:
-// 12 Dec 2023, 15:10:22
+// 14 Nov 2024, 17:20:23
 // Auto updated?
 // Yes
 //
@@ -16,7 +16,8 @@
 use std::collections::{HashMap, HashSet};
 use std::sync::Arc;

-use crate::ast::Metadata;
+use specifications::wir::Metadata;
+
 use crate::edgebuffer::EdgeBuffer;
diff --git a/brane-ast/src/compile.rs b/brane-ast/src/compile.rs
index 14a5bd43..9f64de1e 100644
--- a/brane-ast/src/compile.rs
+++ b/brane-ast/src/compile.rs
@@ -4,7 +4,7 @@
 // Created:
 // 12 Sep 2022, 18:12:44
 // Last edited:
-// 13 Dec 2023, 08:22:16
+// 29 Apr 2025, 13:39:18
 // Auto updated?
 // Yes
 //
@@ -19,8 +19,8 @@
 use brane_dsl::{Error as ParseError, ParserOptions};
 use log::trace;
 use specifications::data::DataIndex;
 use specifications::package::PackageIndex;
+use specifications::wir::Workflow;

-use crate::ast::Workflow;
 use crate::ast_unresolved::UnresolvedWorkflow;
 pub use crate::errors::AstError as Error;
 use crate::state::CompileState;
diff --git a/brane-ast/src/dsl.rs b/brane-ast/src/dsl.rs
new file mode 100644
index 00000000..6efd1c57
--- /dev/null
+++ b/brane-ast/src/dsl.rs
@@ -0,0 +1,109 @@
+// DSL.rs
+// by Lut99
+//
+// Created:
+// 30 Aug 2022, 12:02:57
+// Last edited:
+// 14 Nov 2024, 17:47:02
+// Auto updated?
+// Yes
+//
+// Description:
+//! Defines conversions for compatibility with [`brane_dsl`] types.
+//
+
+use brane_dsl::data_type::FunctionSignature;
+use brane_dsl::location::AllowedLocations;
+use specifications::wir::builtins::BuiltinClasses;
+use specifications::wir::data_type::DataType;
+use specifications::wir::locations::Locations;
+
+
+/***** LIBRARY *****/
+/// Converts from a [DSL DataType](brane_dsl::DataType) to the executable one.
+///
+/// # Arguments
+/// - `value`: The [`DataType`](brane_dsl::DataType) to convert.
+///
+/// # Returns
+/// A converted [`DataType`].
+#[inline]
+pub fn dtype_dsl_to_ast(value: brane_dsl::DataType) -> DataType {
+    use brane_dsl::DataType::*;
+    match value {
+        Any => DataType::Any,
+        Void => DataType::Void,
+
+        Boolean => DataType::Boolean,
+        Integer => DataType::Integer,
+        Real => DataType::Real,
+        String => DataType::String,
+        Semver => DataType::Semver,
+
+        Array(a) => DataType::Array { elem_type: Box::new(dtype_dsl_to_ast(*a)) },
+        Function(sig) => {
+            DataType::Function { args: sig.args.into_iter().map(|d| dtype_dsl_to_ast(d)).collect(), ret: Box::new(dtype_dsl_to_ast(sig.ret)) }
+        },
+        Class(name) => {
+            // Match if 'Data' or 'IntermediateResult'
+            if name == BuiltinClasses::Data.name() {
+                DataType::Data
+            } else if name == BuiltinClasses::IntermediateResult.name() {
+                DataType::IntermediateResult
+            } else {
+                DataType::Class { name }
+            }
+        },
+    }
+}
+
+/// Converts from an [executable DataType](DataType) to the DSL one.
+///
+/// # Arguments
+/// - `value`: The [`DataType`](DataType) to convert.
+///
+/// # Returns
+/// A converted [`brane_dsl::DataType`].
+#[inline]
+pub fn dtype_ast_to_dsl(value: DataType) -> brane_dsl::DataType {
+    use brane_dsl::DataType::*;
+    match value {
+        DataType::Any => Any,
+        DataType::Void => Void,
+
+        DataType::Numeric | DataType::Addable | DataType::Callable | DataType::NonVoid => {
+            panic!("Cannot convert permissive data type (i.e., set of types) to a single brane_dsl::DataType")
+        },
+
+        DataType::Boolean => Boolean,
+        DataType::Integer => Integer,
+        DataType::Real => Real,
+        DataType::String => String,
+        DataType::Semver => Semver,
+
+        DataType::Array { elem_type } => Array(Box::new(dtype_ast_to_dsl(*elem_type))),
+        DataType::Function { args, ret } => {
+            Function(Box::new(FunctionSignature { args: args.into_iter().map(dtype_ast_to_dsl).collect(), ret: dtype_ast_to_dsl(*ret) }))
+        },
+        DataType::Class { name } => Class(name),
+        DataType::Data => Class(BuiltinClasses::Data.name().into()),
+        DataType::IntermediateResult => Class(BuiltinClasses::IntermediateResult.name().into()),
+    }
+}
+
+
+
+/// Converts from an [`AllowedLocations`] to a [`Locations`].
+///
+/// # Arguments
+/// - `locs`: The [`AllowedLocations`] to convert.
+///
+/// # Returns
+/// A new [`Locations`].
+#[inline]
+pub fn locs_dsl_to_ast(locs: AllowedLocations) -> Locations {
+    match locs {
+        AllowedLocations::All => Locations::All,
+        AllowedLocations::Exclusive(locs) => Locations::Restricted(locs.into_iter().map(|l| l.into()).collect()),
+    }
+}
diff --git a/brane-ast/src/edgebuffer.rs b/brane-ast/src/edgebuffer.rs
index 99ef446b..3437154c 100644
--- a/brane-ast/src/edgebuffer.rs
+++ b/brane-ast/src/edgebuffer.rs
@@ -4,7 +4,7 @@
 // Created:
 // 05 Sep 2022, 09:27:32
 // Last edited:
-// 02 Nov 2023, 14:25:07
+// 14 Nov 2024, 17:20:32
 // Auto updated?
// Yes // @@ -20,9 +20,8 @@ use std::hash::{Hash, Hasher}; use std::mem; use std::rc::Rc; -use brane_dsl::spec::MergeStrategy; - -use crate::ast::Edge; +use specifications::wir::Edge; +use specifications::wir::merge_strategy::MergeStrategy; /***** TESTS *****/ diff --git a/brane-ast/src/errors.rs b/brane-ast/src/errors.rs index 28b457ce..dc670058 100644 --- a/brane-ast/src/errors.rs +++ b/brane-ast/src/errors.rs @@ -4,7 +4,7 @@ // Created: // 10 Aug 2022, 13:52:37 // Last edited: -// 31 Jan 2024, 11:35:11 +// 14 Nov 2024, 17:15:21 // Auto updated? // Yes // @@ -17,12 +17,11 @@ use std::fmt::Display; use std::io::Write; use brane_dsl::ast::Expr; -use brane_dsl::spec::MergeStrategy; use brane_dsl::{DataType, TextRange}; use console::{Style, style}; use specifications::version::Version; - -use crate::spec::BuiltinClasses; +use specifications::wir::builtins::BuiltinClasses; +use specifications::wir::merge_strategy::MergeStrategy; /***** HELPER MACROS *****/ diff --git a/brane-ast/src/lib.rs b/brane-ast/src/lib.rs index 6f68029e..f0aa7c8c 100644 --- a/brane-ast/src/lib.rs +++ b/brane-ast/src/lib.rs @@ -4,7 +4,7 @@ // Created: // 10 Aug 2022, 13:51:38 // Last edited: -// 16 Jan 2024, 11:32:14 +// 14 Nov 2024, 17:18:32 // Auto updated? // Yes // @@ -20,28 +20,21 @@ extern crate lazy_static; // Declare the modules -pub mod ast; pub mod ast_unresolved; pub mod compile; -pub mod data_type; +pub mod dsl; pub mod edgebuffer; pub mod errors; pub mod fetcher; -pub mod func_id; -pub mod locations; -pub mod spec; pub mod state; pub mod traversals; pub mod warnings; - // Re-export some stuff from brane-dsl -pub use ast::{SymTable, Workflow}; pub use ast_unresolved::UnresolvedWorkflow; pub use brane_dsl::ParserOptions; -pub use brane_dsl::spec::{MergeStrategy, TextPos, TextRange}; +pub use brane_dsl::spec::{TextPos, TextRange}; pub use compile::{CompileResult, CompileStage, compile_program, compile_program_to, compile_snippet, compile_snippet_to}; -pub use data_type::DataType; // Bring some stuff into the global namespace. pub use errors::AstError as Error; pub use warnings::AstWarning as Warning; diff --git a/brane-ast/src/spec.rs b/brane-ast/src/spec.rs deleted file mode 100644 index ef102af0..00000000 --- a/brane-ast/src/spec.rs +++ /dev/null @@ -1,201 +0,0 @@ -// SPEC.rs -// by Lut99 -// -// Created: -// 20 Oct 2022, 14:17:30 -// Last edited: -// 07 Nov 2023, 17:15:25 -// Auto updated? -// Yes -// -// Description: -//! Defines (public) interfaces and structs for the `brane-ast` crate. -// - -use brane_dsl::data_type::FunctionSignature; -use brane_dsl::{DataType, TextRange}; -use strum::{EnumIter, IntoEnumIterator as _}; - -use crate::state::{ClassState, FunctionState, VarState}; - - -/***** LIBRARY *****/ -/// Defines the builtin functions that exist in BraneScript. -#[derive(Clone, Copy, Debug, EnumIter)] -pub enum BuiltinFunctions { - /// The print-function, which prints some text to stdout. - Print, - /// The println-function, which does the same as `Print` but now with a newline appended to the text. - PrintLn, - - /// The len-function, which returns the length of an array. - Len, - - /// The commit_builtin-function, which turns an IntermediateResult into a Data. - CommitResult, -} - -impl BuiltinFunctions { - /// Returns the identifier of this builtin function. 
- #[inline] - pub fn name(&self) -> &'static str { - use BuiltinFunctions::*; - match self { - Print => "print", - PrintLn => "println", - - Len => "len", - - CommitResult => "commit_result", - } - } - - /// Returns the signature of this specific builtin. - #[inline] - pub fn signature(&self) -> FunctionSignature { - use BuiltinFunctions::*; - match self { - Print => FunctionSignature::new(vec![DataType::String], DataType::Void), - PrintLn => FunctionSignature::new(vec![DataType::String], DataType::Void), - - Len => FunctionSignature::new(vec![DataType::Array(Box::new(DataType::Any))], DataType::Integer), - - CommitResult => FunctionSignature::new( - vec![DataType::String, DataType::Class(BuiltinClasses::IntermediateResult.name().into())], - DataType::Class(BuiltinClasses::Data.name().into()), - ), - } - } - - /// Returns an array with all the builtin functions in it. - #[inline] - pub fn all() -> [Self; 4] { [Self::Print, Self::PrintLn, Self::Len, Self::CommitResult] } - - /// Returns an Array with all of the builtin functions but already casted to FunctionStates. - #[inline] - pub fn all_into_state() -> [FunctionState; 4] { [Self::Print.into(), Self::PrintLn.into(), Self::Len.into(), Self::CommitResult.into()] } - - /// Checks if the given string is a builtin. - #[inline] - pub fn is_builtin(name: impl AsRef) -> bool { - // Note that the order in which we match (i.e., on self instead of name) is a little awkward but guarantees Rust will warns us if we change the set. - let name: &str = name.as_ref(); - for builtin in Self::iter() { - if name == builtin.name() { - return true; - } - } - false - } -} - -impl From for FunctionState { - #[inline] - fn from(value: BuiltinFunctions) -> Self { - Self { - name: value.name().into(), - signature: value.signature(), - - class_name: None, - - range: TextRange::none(), - } - } -} - - - -/// Defines the builtin classes that exist in BraneScript. -#[derive(Clone, Copy, Debug)] -pub enum BuiltinClasses { - /// The data-class. - Data, - /// The intermediate-result-class. - IntermediateResult, -} - -impl BuiltinClasses { - /// Returns the identifier of this builtin class. - #[inline] - pub fn name(&self) -> &'static str { - use BuiltinClasses::*; - match self { - Data => "Data", - IntermediateResult => "IntermediateResult", - } - } - - /// Returns a list of all properties (as `VarState`s) in this builtin class. - #[inline] - pub fn props(&self) -> Vec { - use BuiltinClasses::*; - match self { - Data => vec![VarState { - name: "name".into(), - data_type: DataType::String, - function_name: None, - class_name: Some(self.name().into()), - range: TextRange::none(), - }], - IntermediateResult => vec![VarState { - name: "path".into(), - data_type: DataType::String, - function_name: None, - class_name: Some(self.name().into()), - range: TextRange::none(), - }], - } - } - - /// Returns a list of all methods (as `FunctioNState`s) in this builtin class. - #[inline] - pub fn methods(&self) -> Vec { - use BuiltinClasses::*; - match self { - Data => vec![], - IntermediateResult => vec![], - } - } - - /// Returns an array with all the builtin classes in it. - #[inline] - pub fn all() -> [Self; 2] { [Self::Data, Self::IntermediateResult] } - - /// Returns an Array with all of the builtin functions but already casted to FunctionStates. - /// - /// # Arguments - /// - `funcs`: The list of function states to use for declaring new methods, if any. 
- #[inline] - pub fn all_into_state(funcs: &mut Vec) -> [ClassState; 2] { - [Self::Data.into_state(funcs), Self::IntermediateResult.into_state(funcs)] - } - - /// Creates a new ClassState for this BuiltinClasses, where we define the functions in the given TableList of functions. - /// - /// # Arguments - /// - `funcs`: The TableList of functions where to declare the new ones. - /// - /// # Returns - /// A new ClassState instance. - #[inline] - pub fn into_state(&self, funcs: &mut Vec) -> ClassState { - ClassState { - name: self.name().into(), - props: self.props(), - methods: self - .methods() - .into_iter() - .enumerate() - .map(|(i, state)| { - funcs.push(state); - i - }) - .collect(), - - package_name: None, - package_version: None, - - range: TextRange::none(), - } - } -} diff --git a/brane-ast/src/state.rs b/brane-ast/src/state.rs index c19d8155..b3066715 100644 --- a/brane-ast/src/state.rs +++ b/brane-ast/src/state.rs @@ -4,7 +4,7 @@ // Created: // 16 Sep 2022, 08:22:47 // Last edited: -// 13 Dec 2023, 08:20:26 +// 14 Nov 2024, 17:47:50 // Auto updated? // Yes // @@ -23,9 +23,10 @@ use brane_dsl::symbol_table::{ClassEntry, FunctionEntry, SymbolTable, VarEntry}; use brane_dsl::{DataType, TextRange}; use specifications::package::Capability; use specifications::version::Version; +use specifications::wir::builtins::{BuiltinClasses, BuiltinFunctions}; +use specifications::wir::{ClassDef, ComputeTaskDef, Edge, FunctionDef, SymTable, TaskDef, VarDef}; -use crate::ast::{ClassDef, ComputeTaskDef, Edge, FunctionDef, SymTable, TaskDef, VarDef}; -use crate::spec::{BuiltinClasses, BuiltinFunctions}; +use crate::dsl::{dtype_ast_to_dsl, dtype_dsl_to_ast}; /***** STATICS *****/ @@ -64,9 +65,9 @@ impl TableState { /// A new instance of the TableState. pub fn new() -> Self { // Construct the TableLists separately. 
-        let mut funcs: Vec<FunctionState> = Vec::from(BuiltinFunctions::all_into_state());
+        let mut funcs: Vec<FunctionState> = BuiltinFunctions::all().into_iter().map(|f| f.into()).collect();
         let tasks: Vec<TaskState> = Vec::new();
-        let classes: Vec<ClassState> = Vec::from(BuiltinClasses::all_into_state(&mut funcs));
+        let classes: Vec<ClassState> = BuiltinClasses::all().into_iter().map(|c| ClassState::from_builtin(c, &mut funcs)).collect();
         let vars: Vec<VarState> = Vec::new();

         // use that to construct the rest
@@ -315,6 +316,20 @@ pub struct FunctionState {
     pub range: TextRange,
 }

+impl From<BuiltinFunctions> for FunctionState {
+    #[inline]
+    fn from(value: BuiltinFunctions) -> Self {
+        Self {
+            name: value.name().into(),
+            signature: FunctionSignature::from(value),
+
+            class_name: None,
+
+            range: TextRange::none(),
+        }
+    }
+}
+
 impl From<&FunctionState> for FunctionEntry {
     #[inline]
     fn from(value: &FunctionState) -> Self {
@@ -340,7 +355,11 @@ impl From<&FunctionState> for FunctionEntry {
 impl From<FunctionState> for FunctionDef {
     #[inline]
     fn from(value: FunctionState) -> Self {
-        FunctionDef { name: value.name, args: value.signature.args.into_iter().map(|d| d.into()).collect(), ret: value.signature.ret.into() }
+        FunctionDef {
+            name: value.name,
+            args: value.signature.args.into_iter().map(|d| dtype_dsl_to_ast(d)).collect(),
+            ret: dtype_dsl_to_ast(value.signature.ret),
+        }
     }
 }

@@ -397,8 +416,8 @@ impl From<TaskState> for TaskDef {
             function: Box::new(FunctionDef {
                 name: value.name,
-                args: value.signature.args.into_iter().map(|d| d.into()).collect(),
-                ret: value.signature.ret.into(),
+                args: value.signature.args.into_iter().map(|d| dtype_dsl_to_ast(d)).collect(),
+                ret: dtype_dsl_to_ast(value.signature.ret),
             }),
             args_names: value.arg_names,
             requirements: value.requirements,
@@ -428,6 +447,51 @@ pub struct ClassState {
 }

 impl ClassState {
+    /// Converts a builtin class to a ClassState.
+    ///
+    /// # Arguments
+    /// - `builtin`: The [`BuiltinClasses`] to convert.
+    /// - `funcs`: A list of existing function states to extend with this class's methods.
+    ///
+    /// # Returns
+    /// A new ClassState representing the builtin one.
+    pub fn from_builtin(builtin: BuiltinClasses, funcs: &mut Vec<FunctionState>) -> Self {
+        // Collect the properties
+        let props: Vec<VarState> = builtin
+            .props()
+            .into_iter()
+            .map(|(name, dtype)| VarState {
+                name: (*name).into(),
+                data_type: dtype_ast_to_dsl(dtype.clone()),
+                function_name: None,
+                class_name: Some(builtin.name().into()),
+                range: TextRange::none(),
+            })
+            .collect();
+
+        // Collect the methods
+        let methods: Vec<usize> = builtin
+            .methods()
+            .into_iter()
+            .enumerate()
+            .map(|(i, (name, sig))| {
+                funcs.push(FunctionState {
+                    name: (*name).into(),
+                    signature: FunctionSignature {
+                        args: sig.0.iter().map(|dtype| dtype_ast_to_dsl(dtype.clone())).collect(),
+                        ret: dtype_ast_to_dsl(sig.1.clone()),
+                    },
+                    class_name: Some(builtin.name().into()),
+                    range: TextRange::none(),
+                });
+                i
+            })
+            .collect();
+
+        // Build the final state
+        ClassState { name: builtin.name().into(), props, methods, package_name: None, package_version: None, range: TextRange::none() }
+    }
+
     /// Converts this ClassState into a ClassEntry, using the given list of functions to resolve the internal list.
/// /// # Arguments @@ -524,7 +588,7 @@ impl From<&VarState> for VarEntry { impl From for VarDef { #[inline] - fn from(value: VarState) -> Self { Self { name: value.name, data_type: value.data_type.into() } } + fn from(value: VarState) -> Self { Self { name: value.name, data_type: dtype_dsl_to_ast(value.data_type) } } } diff --git a/brane-ast/src/traversals/compile.rs b/brane-ast/src/traversals/compile.rs index 20531c55..bff86cb6 100644 --- a/brane-ast/src/traversals/compile.rs +++ b/brane-ast/src/traversals/compile.rs @@ -4,7 +4,7 @@ // Created: // 31 Aug 2022, 11:32:04 // Last edited: -// 31 Jan 2024, 11:35:39 +// 14 Nov 2024, 17:18:56 // Auto updated? // Yes // @@ -20,14 +20,15 @@ use std::rc::Rc; use std::sync::Arc; use brane_dsl::ast as dsl; -use brane_dsl::spec::MergeStrategy; use brane_dsl::symbol_table::{FunctionEntry, VarEntry}; use enum_debug::EnumDebug as _; use log::warn; use specifications::data::DataName; +use specifications::wir as ast; +use specifications::wir::merge_strategy::MergeStrategy; -use crate::ast; use crate::ast_unresolved::UnresolvedWorkflow; +use crate::dsl::{dtype_dsl_to_ast, locs_dsl_to_ast}; use crate::edgebuffer::EdgeBuffer; use crate::errors::AstError; use crate::state::{CompileState, TableState}; @@ -496,7 +497,7 @@ fn pass_expr(expr: dsl::Expr, edges: &mut EdgeBuffer, _table: &TableState) { pass_expr(*expr, edges, _table); // Insert a linear edge with the cast instruction - edges.write(ast::Edge::Linear { instrs: vec![ast::EdgeInstr::Cast { res_type: (&target).into() }], next: usize::MAX }); + edges.write(ast::Edge::Linear { instrs: vec![ast::EdgeInstr::Cast { res_type: dtype_dsl_to_ast(target) }], next: usize::MAX }); }, Call { expr, args, st_entry, locations, input, result, metadata, range: _ } => { @@ -524,7 +525,7 @@ fn pass_expr(expr: dsl::Expr, edges: &mut EdgeBuffer, _table: &TableState) { // It's an external call; replace with a Node edge (so sorry everyone) edges.write(ast::Edge::Node { task: st_entry.unwrap().borrow().index, - locs: locations.into(), + locs: locs_dsl_to_ast(locations), at: None, input: input.into_iter().map(|d| (d.into(), None)).collect(), result, @@ -549,7 +550,7 @@ fn pass_expr(expr: dsl::Expr, edges: &mut EdgeBuffer, _table: &TableState) { // Now add the Array instruction in a linear edge edges.write(ast::Edge::Linear { - instrs: vec![ast::EdgeInstr::Array { length: values_len, res_type: (&data_type).into() }], + instrs: vec![ast::EdgeInstr::Array { length: values_len, res_type: dtype_dsl_to_ast(data_type) }], next: usize::MAX, }); }, @@ -559,7 +560,7 @@ fn pass_expr(expr: dsl::Expr, edges: &mut EdgeBuffer, _table: &TableState) { pass_expr(*index, edges, _table); // Write the index instruction in a linear edge - edges.write(ast::Edge::Linear { instrs: vec![ast::EdgeInstr::ArrayIndex { res_type: (&data_type).into() }], next: usize::MAX }); + edges.write(ast::Edge::Linear { instrs: vec![ast::EdgeInstr::ArrayIndex { res_type: dtype_dsl_to_ast(data_type) }], next: usize::MAX }); }, UnaOp { op, expr, .. } => { diff --git a/brane-ast/src/traversals/data.rs b/brane-ast/src/traversals/data.rs index 60da9d2d..4b6ffd48 100644 --- a/brane-ast/src/traversals/data.rs +++ b/brane-ast/src/traversals/data.rs @@ -4,7 +4,7 @@ // Created: // 25 Oct 2022, 13:34:31 // Last edited: -// 08 Dec 2023, 10:41:31 +// 14 Nov 2024, 17:17:01 // Auto updated? 
// Yes // @@ -22,10 +22,10 @@ use brane_dsl::symbol_table::{ClassEntry, FunctionEntry, SymbolTableEntry, VarEn use brane_dsl::{DataType, SymbolTable}; use enum_debug::EnumDebug as _; use log::debug; +use specifications::wir::builtins::{BuiltinClasses, BuiltinFunctions}; use uuid::Uuid; use crate::errors::AstError; -use crate::spec::{BuiltinClasses, BuiltinFunctions}; use crate::state::{CompileState, DataState}; diff --git a/brane-ast/src/traversals/print/ast.rs b/brane-ast/src/traversals/print/ast.rs index 68cadeb0..a763e543 100644 --- a/brane-ast/src/traversals/print/ast.rs +++ b/brane-ast/src/traversals/print/ast.rs @@ -4,7 +4,7 @@ // Created: // 31 Aug 2022, 09:25:11 // Last edited: -// 06 Feb 2024, 11:38:47 +// 14 Nov 2024, 17:18:47 // Auto updated? // Yes // @@ -15,10 +15,11 @@ use std::collections::HashSet; use std::io::Write; -use crate::ast::{Edge, EdgeInstr, FunctionDef, SymTable, TaskDef, Workflow}; -use crate::data_type::DataType; +use specifications::wir::data_type::DataType; +use specifications::wir::func_id::FunctionId; +use specifications::wir::{Edge, EdgeInstr, FunctionDef, SymTable, TaskDef, Workflow}; + pub use crate::errors::AstError as Error; -use crate::func_id::FunctionId; /***** MACROS ******/ diff --git a/brane-ast/src/traversals/print/ast_unresolved.rs b/brane-ast/src/traversals/print/ast_unresolved.rs index 9af7dc50..95e0f852 100644 --- a/brane-ast/src/traversals/print/ast_unresolved.rs +++ b/brane-ast/src/traversals/print/ast_unresolved.rs @@ -4,7 +4,7 @@ // Created: // 05 Sep 2022, 11:08:57 // Last edited: -// 12 Dec 2023, 19:03:43 +// 14 Nov 2024, 17:18:42 // Auto updated? // Yes // @@ -18,8 +18,8 @@ use std::collections::{HashMap, HashSet}; use std::io::Write; use brane_dsl::DataType; +use specifications::wir::{Edge, EdgeInstr}; -use crate::ast::{Edge, EdgeInstr}; use crate::ast_unresolved::UnresolvedWorkflow; use crate::edgebuffer::{EdgeBuffer, EdgeBufferNode, EdgeBufferNodeLink, EdgeBufferNodePtr}; pub use crate::errors::AstError as Error; diff --git a/brane-ast/src/traversals/resolve.rs b/brane-ast/src/traversals/resolve.rs index c392775d..eeca60fa 100644 --- a/brane-ast/src/traversals/resolve.rs +++ b/brane-ast/src/traversals/resolve.rs @@ -4,7 +4,7 @@ // Created: // 18 Aug 2022, 15:24:54 // Last edited: -// 12 Dec 2023, 17:13:11 +// 14 Nov 2024, 17:17:16 // Auto updated? // Yes // @@ -19,7 +19,6 @@ use std::rc::Rc; use brane_dsl::ast::{Block, Expr, Identifier, Node, Program, Stmt}; use brane_dsl::data_type::{ClassSignature, FunctionSignature}; -use brane_dsl::spec::MergeStrategy; use brane_dsl::symbol_table::{ClassEntry, FunctionEntry, SymbolTableEntry, VarEntry}; use brane_dsl::{DataType, SymbolTable, TextRange}; use enum_debug::EnumDebug as _; @@ -27,10 +26,11 @@ use log::trace; use specifications::data::DataIndex; use specifications::package::{PackageIndex, PackageInfo}; use specifications::version::Version; +use specifications::wir::builtins::{BuiltinClasses, BuiltinFunctions}; +use specifications::wir::merge_strategy::MergeStrategy; use crate::errors::AstError; pub use crate::errors::ResolveError as Error; -use crate::spec::{BuiltinClasses, BuiltinFunctions}; use crate::state::CompileState; diff --git a/brane-ast/src/traversals/typing.rs b/brane-ast/src/traversals/typing.rs index c1b58e4a..3f82e1bc 100644 --- a/brane-ast/src/traversals/typing.rs +++ b/brane-ast/src/traversals/typing.rs @@ -4,7 +4,7 @@ // Created: // 19 Aug 2022, 16:34:16 // Last edited: -// 08 Dec 2023, 11:09:07 +// 14 Nov 2024, 17:17:26 // Auto updated? 
// Yes // @@ -17,14 +17,14 @@ use std::cell::{Ref, RefCell, RefMut}; use std::rc::Rc; use brane_dsl::ast::{Block, Expr, Node, Program, Stmt}; -use brane_dsl::spec::MergeStrategy; use brane_dsl::symbol_table::{ClassEntry, FunctionEntry, SymbolTableEntry, VarEntry}; use brane_dsl::{DataType, SymbolTable, TextPos, TextRange}; use enum_debug::EnumDebug as _; +use specifications::wir::builtins::BuiltinClasses; +use specifications::wir::merge_strategy::MergeStrategy; use crate::errors::AstError; pub use crate::errors::TypeError as Error; -use crate::spec::BuiltinClasses; use crate::warnings::AstWarning; pub use crate::warnings::TypeWarning as Warning; diff --git a/brane-ast/src/traversals/workflow_resolve.rs b/brane-ast/src/traversals/workflow_resolve.rs index a94adaa3..72bad3b2 100644 --- a/brane-ast/src/traversals/workflow_resolve.rs +++ b/brane-ast/src/traversals/workflow_resolve.rs @@ -4,7 +4,7 @@ // Created: // 05 Sep 2022, 17:36:21 // Last edited: -// 06 Feb 2024, 11:36:49 +// 14 Nov 2024, 17:20:49 // Auto updated? // Yes // @@ -17,8 +17,8 @@ use std::cell::Ref; use std::collections::HashMap; use log::debug; +use specifications::wir::{Edge, SymTable, Workflow}; -use crate::ast::{Edge, SymTable, Workflow}; use crate::ast_unresolved::UnresolvedWorkflow; use crate::edgebuffer::{EdgeBuffer, EdgeBufferNode, EdgeBufferNodeLink, EdgeBufferNodePtr}; use crate::errors::AstError; diff --git a/brane-ast/src/warnings.rs b/brane-ast/src/warnings.rs index c380a539..8f6f0b83 100644 --- a/brane-ast/src/warnings.rs +++ b/brane-ast/src/warnings.rs @@ -4,7 +4,7 @@ // Created: // 05 Sep 2022, 16:08:42 // Last edited: -// 12 Dec 2023, 14:56:22 +// 14 Nov 2024, 17:14:53 // Auto updated? // Yes // @@ -16,11 +16,11 @@ use std::fmt::{Debug, Display, Formatter, Result as FResult}; use std::io::Write; use brane_dsl::TextRange; -use brane_dsl::spec::MergeStrategy; use console::{Style, style}; +use specifications::wir::builtins::BuiltinClasses; +use specifications::wir::merge_strategy::MergeStrategy; use crate::errors::{ewrite_range, n}; -use crate::spec::BuiltinClasses; /***** HELPER FUNCTIONS *****/ diff --git a/brane-cc/src/main.rs b/brane-cc/src/main.rs index 5ab7ffe4..49dcbc7c 100644 --- a/brane-cc/src/main.rs +++ b/brane-cc/src/main.rs @@ -4,7 +4,7 @@ // Created: // 18 Nov 2022, 14:36:55 // Last edited: -// 13 Jun 2024, 16:21:19 +// 14 Nov 2024, 17:48:52 // Auto updated? 
// Yes // @@ -21,7 +21,7 @@ use std::path::PathBuf; use brane_ast::state::CompileState; use brane_ast::traversals::print::ast; -use brane_ast::{CompileResult, ParserOptions, Workflow, compile_snippet}; +use brane_ast::{CompileResult, ParserOptions, compile_snippet}; use brane_cc::errors::CompileError; use brane_cc::spec::IndexLocation; use brane_dsl::Language; @@ -34,6 +34,7 @@ use humanlog::{DebugMode, HumanLogger}; use log::{debug, error, info, warn}; use specifications::data::DataIndex; use specifications::package::PackageIndex; +use specifications::wir::Workflow; diff --git a/brane-cfg/Cargo.toml b/brane-cfg/Cargo.toml index e3066e47..a7d97f05 100644 --- a/brane-cfg/Cargo.toml +++ b/brane-cfg/Cargo.toml @@ -13,7 +13,7 @@ enum-debug.workspace = true log = "0.4.22" rustls = "0.21.6" rustls-pemfile = "1.0.1" -serde = { version = "1.0.204", features = ["derive"] } +serde = { version = "1.0.215", features = ["derive"] } serde_yaml = { version = "0.0.10", package = "serde_yml" } thiserror = "2.0.0" tokio = { version = "1.38.0", features = [] } diff --git a/brane-cfg/src/backend.rs b/brane-cfg/src/backend.rs index 1f994693..768845f2 100644 --- a/brane-cfg/src/backend.rs +++ b/brane-cfg/src/backend.rs @@ -4,7 +4,7 @@ // Created: // 18 Oct 2022, 13:50:11 // Last edited: -// 23 May 2023, 15:22:15 +// 14 Nov 2024, 14:49:51 // Auto updated? // Yes // diff --git a/brane-cfg/src/infra.rs b/brane-cfg/src/infra.rs index 8c40a754..64929b53 100644 --- a/brane-cfg/src/infra.rs +++ b/brane-cfg/src/infra.rs @@ -4,7 +4,7 @@ // Created: // 04 Oct 2022, 11:04:33 // Last edited: -// 31 Jan 2024, 15:53:29 +// 14 Nov 2024, 14:49:46 // Auto updated? // Yes // diff --git a/brane-cfg/src/node.rs b/brane-cfg/src/node.rs index 39701702..72ade062 100644 --- a/brane-cfg/src/node.rs +++ b/brane-cfg/src/node.rs @@ -4,7 +4,7 @@ // Created: // 28 Feb 2023, 10:01:27 // Last edited: -// 07 Mar 2024, 09:52:57 +// 29 Apr 2025, 13:50:16 // Auto updated? // Yes // @@ -21,7 +21,7 @@ use std::str::FromStr; use enum_debug::EnumDebug; use serde::{Deserialize, Serialize}; -use specifications::address::Address; +use specifications::address::{Address, Host}; pub use crate::errors::NodeConfigError as Error; use crate::errors::NodeKindParseError; @@ -427,9 +427,9 @@ pub struct WorkerPaths { #[serde(alias = "policy_db")] pub policy_database: PathBuf, /// The path to the secret used for the deliberation endpoint in the checker. - pub policy_deliberation_secret: PathBuf, + pub policy_delib_secret: PathBuf, /// The path to the secret used for the policy expert endpoint in the checker. - pub policy_expert_secret: PathBuf, + pub policy_store_secret: PathBuf, /// The path the (persistent) audit log. Can be omitted to not have a persistent log. pub policy_audit_log: Option, /// The path to the proxy file, if applicable. Ignored if no service is present. @@ -456,7 +456,7 @@ pub struct WorkerServices { pub job: PublicService, /// Defines the checker service. #[serde(alias = "checker")] - pub chk: PrivateService, + pub chk: DoublePrivateService, /// Defines the proxy service. #[serde(alias = "proxy")] pub prx: PrivateOrExternalService, @@ -718,6 +718,21 @@ pub struct PrivateService { pub bind: SocketAddr, } +/// Defines what we need to know for a private service (i.e., a service that is only reachable from within the Docker network, i.e., the node) that has two separate endpoints. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct DoublePrivateService { + /// Defines the name of the Docker container. 
+ pub name: String, + /// Defines how the services on the same node can reach this service (which can be optimized due to the same-Docker-network property). + pub host: Host, + /// The port of the deliberation API. + #[serde(alias = "deliberation")] + pub delib: u16, + /// The port of the storage API. + #[serde(alias = "storage")] + pub store: u16, +} + /// Defines a service that we do not host, but only use. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct ExternalService { diff --git a/brane-cfg/src/proxy.rs b/brane-cfg/src/proxy.rs index e6fd1b3f..0a5a474f 100644 --- a/brane-cfg/src/proxy.rs +++ b/brane-cfg/src/proxy.rs @@ -4,7 +4,7 @@ // Created: // 09 Mar 2023, 15:15:47 // Last edited: -// 16 Mar 2023, 15:39:53 +// 14 Nov 2024, 14:49:19 // Auto updated? // Yes // @@ -13,7 +13,7 @@ // use std::collections::HashMap; -use std::fmt::{Display, Formatter, Result as FResult}; +use std::fmt::{Debug, Display, Formatter, Result as FResult}; use std::ops::RangeInclusive; use std::str::FromStr; @@ -136,3 +136,12 @@ pub struct ForwardConfig { /// The protocol that we use to communicate to the proxy. pub protocol: ProxyProtocol, } +// impl Debug for ForwardConfig { +// #[inline] +// fn fmt(&self, f: &mut Formatter<'_>) -> FResult { +// let mut fmt = f.debug_struct("ForwardConfig"); +// fmt.field("address", &&self.address); +// fmt.field("protocol", &self.protocol); +// fmt.finish() +// } +// } diff --git a/brane-chk/Cargo.toml b/brane-chk/Cargo.toml new file mode 100644 index 00000000..47ec5be8 --- /dev/null +++ b/brane-chk/Cargo.toml @@ -0,0 +1,59 @@ +[package] +name = "brane-chk" +edition = "2021" +rust-version = "1.74" +version.workspace = true +repository.workspace = true +authors = ["Tim Müller", "Bas Kloosterman", "Daniel Voogsgerd"] +license.workspace = true +default-run = "brane-chk" + + +[[bin]] +name = "brane-chk" +path = "src/main.rs" + +[[bin]] +name = "wirc" +path = "src/workflow/compiler.rs" + + +[dependencies] +axum = "0.7.7" +base16ct = { version = "0.2.0", features = ["std"] } +clap = { version = "4.5.20", features = ["derive", "env"] } +futures = "0.3.31" +hyper = "1.5.0" +hyper-util = "0.1.9" +rand = "0.8.5" +reqwest = "0.12.9" +serde = "1.0.213" +serde_json = "1.0.120" +shlex = "1.3.0" +thiserror = "1.0.61" +tokio = { version = "1.42.0", default-features = false, features = ["signal"] } +tower-service = "0.3.3" +tracing = "0.1.40" +tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } + +# eflint-json = { git = "https://gitlab.com/eflint/json-spec-rs", branch = "incorrect-is-invariant", features = ["display_eflint"] } +enum-debug = { git = "https://github.com/Lut99/enum-debug", tag = "v1.1.0" } +error-trace = { git = "https://github.com/Lut99/error-trace-rs", tag = "v3.3.0", features = ["serde"] } +policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor", default-features = false, features = ["eflint-haskell-reasoner", "file-logger", "serde", "workflow"] } +policy-store = { git = "https://github.com/BraneFramework/policy-store", default-features = false, features = ["axum-server", "jwk-auth", "jwk-auth-kid", "sqlite-database", "sqlite-database-embedded-migrations"] } + +brane-ast = { path = "../brane-ast" } +brane-cfg = { path = "../brane-cfg" } +brane-tsk = { path = "../brane-tsk" } +specifications = { path = "../specifications" } + + +[dev-dependencies] +humanlog = { git = "https://github.com/Lut99/humanlog-rs", tag = "v0.2.0" } +names = { git = "https://github.com/Lut99/names-rs", features = ["rand", 
"three"] } + +brane-shr = { path = "../brane-shr" } + + +[features] +default = [] diff --git a/brane-chk/policy/README.md b/brane-chk/policy/README.md new file mode 100644 index 00000000..8b61d41d --- /dev/null +++ b/brane-chk/policy/README.md @@ -0,0 +1,6 @@ +# `brane-chk` interface policy +This directory contains the base policy that acts as an interface between user-written policy and the system's information. + +In particular, it defines common concepts about the system (e.g., workflows, users, etc) such that the checker can automatically inject Facts about the current state in a manner consistent with what the user expects. + +The [`main.eflint`](./main.eflint) file defines the entrypoint that collects the other files in the proper order. Start there, and follow `#require`s to find the structure of the base policy. diff --git a/brane-chk/policy/main.eflint b/brane-chk/policy/main.eflint new file mode 100644 index 00000000..a4b4c6c3 --- /dev/null +++ b/brane-chk/policy/main.eflint @@ -0,0 +1,36 @@ +// MAIN.eflint +// by Lut99 +// +// Created: +// 30 Nov 2023, 11:13:01 +// Last edited: +// 29 Apr 2025, 23:22:26 +// Auto updated? +// Yes +// +// Description: +// "Entrypoint" file that collects all eFLINT policy interface into one file. +// + + +///// DEFINITIONS ///// +// Add in Brane definitions (which is basically the world) +#require "state.eflint". + +// Add in the workflow definitions +#require "workflow_base.eflint". + +// Add in the metadata +#require "metadata.eflint". + +// Add in the queries +#require "queries.eflint". + + + +///// ASSERTIONS ///// +// Add state assertions +// #require "state_assert.eflint". + +// Add workflow assertions +// #require "workflow_assert.eflint". diff --git a/brane-chk/policy/metadata.eflint b/brane-chk/policy/metadata.eflint new file mode 100644 index 00000000..2c58159a --- /dev/null +++ b/brane-chk/policy/metadata.eflint @@ -0,0 +1,36 @@ +// METADATA.eflint +// by Lut99 +// +// Created: +// 30 Nov 2023, 11:23:48 +// Last edited: +// 30 Nov 2023, 11:48:36 +// Auto updated? +// Yes +// +// Description: +// Defines metadata that can be assigned to most parts of the policy. +// + +#require "state.eflint". +#require "workflow_base.eflint". + + +// Defines a cryptographic signature that proves a user saw something. Given as a pair of the person signing it and the physical signature. +Fact signature Identified by user * string. +// Predicate over `signature`s which denotes which of them have been validated to be correct by the checker. +Fact signature-valid Identified by signature. + +// An arbitrary piece of information related to various pieces of information to provide them with metadata. Given as a pair of the person defining/owning it and the information itself. +Fact tag Identified by user * string. + +// Relates a `tag` to a `signature`. +Fact metadata Identified by tag * signature. +// States that a tag has been assigned to a particular workflow by the person providing the signature. +Fact workflow-metadata Identified by workflow * metadata. +// States that a tag has been assigned to a particular node in a workflow by the person providing the signature. +Fact node-metadata Identified by node * metadata. +// States that a tag has been assigned to a particular asset by the person providing the signature. +Fact asset-metadata Identified by asset * metadata. +// States that a tag has been assigned to a particular user by the person providing the signature. +Fact user-metadata Identified by user * metadata. 
diff --git a/brane-chk/policy/queries.eflint b/brane-chk/policy/queries.eflint
new file mode 100644
index 00000000..750c3f6d
--- /dev/null
+++ b/brane-chk/policy/queries.eflint
@@ -0,0 +1,25 @@
+// QUERIES.eflint
+// by Lut99
+//
+// Created:
+// 07 Dec 2023, 16:51:58
+// Last edited:
+// 29 Apr 2025, 23:22:58
+// Auto updated?
+// Yes
+//
+// Description:
+// Defines the queries that the checker can ask of the policy.
+//
+
+#require "workflow_base.eflint".
+
+
+// Asks if a workflow as a whole is OK.
+Fact workflow-to-execute Identified by workflow.
+// Asks if a particular task is authorised by a checker in the context of a workflow.
+Fact task-to-execute Identified by task.
+// Asks if a particular input to a node can be transferred to the domain executing that node.
+Fact dataset-to-transfer Identified by node-input.
+// Asks if the result of a workflow may be transferred to the receiving party.
+Fact result-to-transfer Identified by workflow-result-recipient.
diff --git a/brane-chk/policy/state.eflint b/brane-chk/policy/state.eflint
new file mode 100644
index 00000000..39638fdb
--- /dev/null
+++ b/brane-chk/policy/state.eflint
@@ -0,0 +1,30 @@
+// STATE.eflint
+// by Lut99
+//
+// Created:
+// 30 Nov 2023, 11:15:54
+// Last edited:
+// 06 Dec 2023, 15:17:25
+// Auto updated?
+// Yes
+//
+// Description:
+// Defines Facts that we use to denote the instance/reasoner's state. Can
+// be thought of as a Brane "core ontology".
+//
+
+
+// Defines users that exist in the system (by ID).
+Fact user.
+// Defines a user that may be considered for planning (i.e., they can act as a worker).
+Fact domain Identified by user.
+
+// Defines an asset in the system.
+Fact asset.
+// States that a particular user has access to a particular asset.
+//
+// Observations:
+// - If the `user` happens to be a `domain`, then the user may use their access to perform tasks on the data within Brane.
+Fact asset-access Identified by asset * user.
+// Defines assets that can be used in function-position in tasks (see `workflow_base.eflint`).
+Fact code Identified by asset.
diff --git a/brane-chk/policy/state_assert.eflint b/brane-chk/policy/state_assert.eflint
new file mode 100644
index 00000000..c1cbcaef
--- /dev/null
+++ b/brane-chk/policy/state_assert.eflint
@@ -0,0 +1,24 @@
+// STATE ASSERT.eflint
+// by Lut99
+//
+// Created:
+// 06 Dec 2023, 11:59:57
+// Last edited:
+// 06 Dec 2023, 15:17:27
+// Auto updated?
+// Yes
+//
+// Description:
+// Defines assertions that enforce particular assumptions on state
+// predicates.
+//
+
+#require "state.eflint".
+
+
+// Assert that users exist when mentioned somewhere.
+Invariant user-exists When
+    (Forall domain : domain.user) && (Forall asset-access : asset-access.user).
+// Assert that assets exist when mentioned somewhere.
+Invariant asset-exists When
+    (Forall asset-access : asset-access.asset) && (Forall code : code.asset).
diff --git a/brane-chk/policy/tests/test_workflow_ext.eflint b/brane-chk/policy/tests/test_workflow_ext.eflint
new file mode 100644
index 00000000..b18eb24d
--- /dev/null
+++ b/brane-chk/policy/tests/test_workflow_ext.eflint
@@ -0,0 +1,63 @@
+// TEST WORKFLOW EXT.eflint
+// by Lut99
+//
+// Created:
+// 30 Nov 2023, 14:20:44
+// Last edited:
+// 06 Dec 2023, 15:36:32
+// Auto updated?
+// Yes
+//
+// Description:
+// File to test if the other files work.
+//
+
+#require "../workflow_ext.eflint".
+
+
+///// TESTS /////
++asset(A).
++asset(B).
++asset(C).
++asset(D).
+
++asset(F).
++code(asset(F)).
++asset(G).
++code(asset(G)).
+ ++user(Amy). ++domain(user(Amy)). ++user(Bob). ++domain(user(Bob)). ++user(Cho). ++domain(user(Cho)). + ++workflow(W). + ++node(workflow(W), X). ++task(node(workflow(W), X)). ++node-input(node(workflow(W), X), asset(F)). ++node-input-from(node-input(node(workflow(W), X), asset(F)), domain(user(Central))). ++function(node-input(node(workflow(W), X), asset(F)), "f"). ++node-input(node(workflow(W), X), asset(A)). ++node-input-from(node-input(node(workflow(W), X), asset(A)), domain(user(Amy))). ++node-output(node(workflow(W), X), asset(B)). ++node-at(node(workflow(W), X), domain(user(Amy))). + ++node(workflow(W), Y). ++task(node(workflow(W), Y)). ++node-input(node(workflow(W), Y), asset(G)). ++node-input-from(node-input(node(workflow(W), Y), asset(G)), domain(user(Central))). ++function(node-input(node(workflow(W), Y), asset(G)), "g"). ++node-input(node(workflow(W), Y), asset(B)). ++node-input-from(node-input(node(workflow(W), Y), asset(B)), domain(user(Amy))). ++node-output(node(workflow(W), Y), asset(C)). ++node-at(node(workflow(W), Y), domain(user(Bob))). + ++node(workflow(W), Z). ++commit(node(workflow(W), Z)). ++node-input(node(workflow(W), Z), asset(C)). ++node-input-from(node-input(node(workflow(W), Z), asset(C)), domain(user(Bob))). ++node-output(node(workflow(W), Z), asset(D)). ++node-at(node(workflow(W), Z), domain(user(Cho))). diff --git a/brane-chk/policy/workflow_assert.eflint b/brane-chk/policy/workflow_assert.eflint new file mode 100644 index 00000000..017c2f70 --- /dev/null +++ b/brane-chk/policy/workflow_assert.eflint @@ -0,0 +1,83 @@ +// WORKFLOW ASSERT.eflint +// by Lut99 +// +// Created: +// 30 Nov 2023, 11:55:00 +// Last edited: +// 13 Dec 2023, 16:11:25 +// Auto updated? +// Yes +// +// Description: +// Defines assertions that enforce particular assumptions on workflow +// predicates. +// + +#require "workflow_base.eflint". + + +// Asserts that users exist when mentioned somewhere. +Invariant user-exists-workflow When + (Forall workflow-result-recipient : workflow-result-recipient.user). +// Asserts that domains exist when mentioned somewhere. +Invariant domain-exists-workflow When + (Forall task-at : task-at.domain). +// Asserts that assets exist when mentioned somewhere. +Invariant asset-exists-workflow When + (Forall workflow-result : workflow-result.asset) && + (Forall node-input : node-input.asset) && (Forall node-output : node-output.asset). +// Asserts that codes exist when mentioned somewhere. +Invariant code-exists-workflow When + (Forall task-code : task-code.code). + +// Asserts that workflows exist when mentioned somewhere. +Invariant workflow-exists When + (Forall workflow-result : workflow-result.workflow) && (Forall node : node.workflow). +// Asserts that workflow-results exist when mentioned somewhere. +Invariant workflow-result-exists When + (Forall workflow-result-recipient : workflow-result-recipient.workflow-result). +// Asserts that nodes exist when mentioned somewhere. +Invariant node-exists When + (Forall node-input : node-input.node) && (Forall node-output : node-output.node) && + (Forall node-at : node-at.node) && + (Forall task : task.node) && (Forall commit : commit.node) && (Forall loop : loop.node) && (Forall loop-body : loop-body.node). +// Asserts that loops exist when mentioned somewhere. +Invariant loop-exists When + (Forall loop-body : loop-body.loop). 
+// // Asserts that node-depends-ons exist when mentioned somewhere +// Invariant node-depends-on-exists When +// (Forall node-depends-on-ensured : node-depends-on-ensured.node-depends-on). +// // Asserts that tasks exist when mentioned somewhere +// Invariant task-exists When +// (Forall task-code : task-code.task). + +// Asserts that every workflow has at most 1 result that is being received by someone. +Invariant workflow-at-most-one-result-recipient When + (Forall workflow : Count(Foreach asset, user : workflow-result-recipient(workflow-result(workflow, asset), user) When workflow-result-recipient(workflow-result(workflow, asset), user)) <= 1). + +// Asserts that every input has exactly one source location. +Invariant node-input-exactly-one-from When + (Forall node-input : Count(Foreach domain : node-input-from(node-input, domain) When node-input-from(node-input, domain)) == 1). +// Asserts that no node produces its own output. +Invariant node-not-recursive-input-output When + (Forall node : Not(Exists asset : (node-input(node, asset) && node-output(node, asset)))). +// Asserts that every node has at most 1 output. +Invariant node-at-most-one-output When + (Forall node : Count(Foreach asset : node-output(node, asset) When node-output(node, asset)) <= 1). +// Asserts that every node has exactly 1 assigned domain. +Invariant node-exactly-one-at When + (Forall node : Count(Foreach domain : node-at(node, domain) When node-at(node, domain)) == 1). + +// Asserts that every task has exactly 1 function. +Invariant task-exactly-one-function When + (Forall task : Count(Foreach function : function When (((function.node-input).node) == (task.node))) <= 1). +// Asserts that every function is only linking code. +Invariant function-over-code When + (Forall function : code((function.node-input).asset)). + +// Asserts every loop has exactly one body +Invariant loop-exactly-one-body When + (Forall loop : Count(Foreach node : loop-body(loop, node) When loop-body(loop, node)) == 1). +// Asserts that the input of a loop matches the first node in the loop's body +Invariant loop-input-equals-body-input When + (Forall loop-body : (Forall asset : (node-input((loop-body.loop).node, asset) && node-input(loop-body.node, asset)) When (node-input((loop-body.loop).node, asset) || node-input(loop-body.node, asset)))). diff --git a/brane-chk/policy/workflow_base.eflint b/brane-chk/policy/workflow_base.eflint new file mode 100644 index 00000000..5272ad6b --- /dev/null +++ b/brane-chk/policy/workflow_base.eflint @@ -0,0 +1,50 @@ +// WORKFLOW BASE.eflint +// by Lut99 +// +// Created: +// 30 Nov 2023, 11:14:15 +// Last edited: +// 07 Dec 2023, 13:32:48 +// Auto updated? +// Yes +// +// Description: +// Defines the `Fact`s that we use to express a Checker Workflow in. +// + +#require "state.eflint". + + +///// DEFINITIONS ///// +// Declares a particular workflow. +Fact workflow. +// Defines a dataset that is the product of this workflow. +Fact workflow-result Identified by workflow * asset. +// Names a dataset that is received by the submitter of the workflow. +Fact workflow-result-recipient Identified by workflow-result * user. + +// Declares a node in a workflow, which is either an executable `task` or the publication of a dataset (`commit`). Given as a pair of a workflow and the node ID, as it models a call to something and that's always in the context of a particular workflow. +Fact node Identified by workflow * string. +// Relates _data_ as _possible_ input to a node. 
Whether this data is actually given as input depends on dynamic control flow. See `node-input-ensured` to find out which inputs are given in *all* control flow paths.
+Fact node-input Identified by node * asset.
+// Relates an input to a domain where it will be downloaded from. Can be at most 1.
+Fact node-input-from Identified by node-input * domain.
+// Relates asset as output to a node. Can be at most 1.
+Fact node-output Identified by node * asset.
+// Defines where a particular node is executed. Can be at most 1.
+Fact node-at Identified by node * domain.
+
+// Declares a particular call of a function.
+Fact task Identified by node.
+// Determines that a particular input asset acts as the code for that task. Given as the input and the name of the function to call in that input.
+Fact function Identified by node-input * string.
+
+// Declares the publication of a particular dataset.
+Fact commit Identified by node.
+
+// Declares a node that is a stand-in for a subgraph that is repeatedly executed.
+//
+// The input of the loop acts as the input of the `loop-body`'s first node, and the output corresponds to the output of the `loop-body`'s last node(s).
+Fact loop Identified by node.
+// Links a `loop` node to the body that is repeatedly executed.
+Fact loop-body Identified by loop * node.
diff --git a/brane-chk/policy/workflow_ext.eflint b/brane-chk/policy/workflow_ext.eflint
new file mode 100644
index 00000000..4a21b732
--- /dev/null
+++ b/brane-chk/policy/workflow_ext.eflint
@@ -0,0 +1,64 @@
+// WORKFLOW EXT.eflint
+// by Lut99
+//
+// Created:
+// 30 Nov 2023, 11:33:50
+// Last edited:
+// 13 Dec 2023, 16:13:19
+// Auto updated?
+// Yes
+//
+// Description:
+// Defines Facts that can be derived from every workflow. Not part of the
+// standard interface, but more like standard library.
+//
+
+#require "workflow_base.eflint".
+
+
+// Automatically derives that a node exists if a task, commit or loop exists.
+Extend Fact node
+    Derived from (Foreach task : task.node)
+    Derived from (Foreach commit : commit.node)
+    Derived from (Foreach loop : loop.node).
+
+// Unifies inputs and outputs of a node.
+Fact node-asset Identified by node * asset
+    Holds when (node-input(node, asset) || node-output(node, asset)).
+// Relates _code_ as what is being executed in a task. Can be at most 1.
+Fact task-code Identified by task * code
+    Derived from (Foreach node-input : task-code(task(node-input.node), code(node-input.asset)) When (task(node-input.node) && code(node-input.asset))).
+
+// Derive the asset access implied by the workflow: a domain sees the assets it supplies as input, plus the inputs, outputs, and code of the nodes it executes.
+Extend Fact asset-access
+    Derived from (Foreach node-input-from : asset-access((node-input-from.node-input).asset, (node-input-from.domain).user))
+    Derived from (Foreach node, asset, domain : asset-access(asset, domain.user) When ((node-asset(node, asset) || (task(node) && task-code(task(node), code(asset)))) && node-at(node, domain))).
+
+// Indicates that the input of the first node *might* depend on the output of the second. Whether this is actually the case depends on dynamic control flow.
+Fact node-depends-on Identified by node1 * node2
+    Conditioned by ((node1.workflow) == (node2.workflow))
+    Holds when node1 == node2
+    Holds when (Exists asset : node-input(node1, asset) && node-output(node2, asset))
+    Holds when (Exists node3 : node-depends-on(node1, node3) && node-depends-on(node3, node2)).
+// Derives a concrete relation of all the assets upon which a node *might* depend.
+Fact node-depends-on-asset Identified by node * asset
+    Derived from (Foreach node-depends-on, asset :
+        node-depends-on-asset(node-depends-on.node1, asset)
+        When node-input(node-depends-on.node2, asset)
+    ).
+// Derives a concrete relation of all the domains upon which a node *might* depend (i.e., where it's executed and gets all its input from).
+Fact node-depends-on-domain Identified by node * domain
+    Derived from (Foreach node-at : node-depends-on-domain(node-at.node, node-at.domain))
+    Derived from (Foreach node-input-from : node-depends-on-domain((node-input-from.node-input).node, node-input-from.domain)).
+
+// Derives all nodes that are dependencies of particular assets.
+Fact asset-dependency-of-node Identified by asset * node
+    Derived from (Foreach node-depends-on-asset : asset-dependency-of-node(node-depends-on-asset.asset, node-depends-on-asset.node)).
+// Derives all the domains to which an asset downstreams.
+Fact asset-downstream-domain Identified by asset * domain
+    // An asset touches the domains from which it is downloaded
+    Derived from (Foreach node-input-from : asset-downstream-domain((node-input-from.node-input).asset, node-input-from.domain))
+    // An asset touches the domain of a task it's input or output of
+    Derived from (Foreach node-asset, domain : asset-downstream-domain(node-asset.asset, domain) When node-at(node-asset.node, domain))
+    // An asset touches the domains of all nodes that depend on it (since they need to see (a result of) it somehow)
+    Derived from (Foreach asset-dependency-of-node, domain : asset-downstream-domain(asset-dependency-of-node.asset, domain) When node-at(asset-dependency-of-node.node, domain)).
diff --git a/brane-chk/src/apis/deliberation.rs b/brane-chk/src/apis/deliberation.rs
new file mode 100644
index 00000000..ab61c01c
--- /dev/null
+++ b/brane-chk/src/apis/deliberation.rs
@@ -0,0 +1,492 @@
+// DELIBERATION.rs
+// by Lut99
+//
+// Created:
+// 28 Oct 2024, 20:44:52
+// Last edited:
+// 02 May 2025, 15:01:31
+// Auto updated?
+// Yes
+//
+// Description:
+//! Implements the webserver for the deliberation API.
+// + +use std::future::Future; +use std::net::SocketAddr; +use std::path::Path; +use std::sync::Arc; + +use axum::body::{Body, Bytes}; +use axum::extract::connect_info::IntoMakeServiceWithConnectInfo; +use axum::extract::{ConnectInfo, Request, State}; +use axum::http::StatusCode; +use axum::middleware::Next; +use axum::response::Response; +use axum::routing::on; +use axum::{Extension, Router}; +use error_trace::{ErrorTrace as _, Trace, trace}; +use futures::StreamExt as _; +use hyper::body::Incoming; +use hyper_util::rt::{TokioExecutor, TokioIo}; +use hyper_util::server::conn::auto::Builder as HyperBuilder; +use policy_reasoner::spec::auditlogger::SessionedAuditLogger; +use policy_reasoner::spec::{AuditLogger, ReasonerConnector, StateResolver}; +use policy_store::auth::jwk::JwkResolver; +use policy_store::auth::jwk::keyresolver::KidResolver; +use policy_store::databases::sqlite::SQLiteDatabase; +use policy_store::spec::AuthResolver as _; +use policy_store::spec::authresolver::HttpError; +use policy_store::spec::metadata::User; +use rand::Rng; +use rand::distributions::Alphanumeric; +use serde::Serialize; +use serde::de::DeserializeOwned; +use specifications::checking::deliberation::{ + CHECK_TASK_PATH, CHECK_TRANSFER_PATH, CHECK_WORKFLOW_PATH, CheckResponse, CheckTaskRequest, CheckTransferRequest, CheckWorkflowRequest, +}; +use thiserror::Error; +use tokio::net::{TcpListener, TcpStream}; +use tower_service::Service as _; +use tracing::field::Empty; +use tracing::{Instrument as _, Level, debug, error, info, span}; + +use crate::stateresolver::{Input, QuestionInput}; +use crate::workflow::compile::pc_to_id; + + +/***** CONSTANTS *****/ +/// The initiator claim that must be given in the input header token. +pub const INITIATOR_CLAIM: &'static str = "username"; + + + + + +/***** ERRORS *****/ +/// Defines errors originating from the bowels of the [`Deliberation`]. +#[derive(Debug, Error)] +pub enum Error { + #[error("Failed to create the KID resolver")] + KidResolver { + #[source] + err: policy_store::auth::jwk::keyresolver::kid::ServerError, + }, + #[error("Failed to bind server on address '{addr}'")] + ListenerBind { + addr: SocketAddr, + #[source] + err: std::io::Error, + }, +} + + + + + +/***** HELPER FUNCTIONS *****/ +/// Turns the given [`Request`] into a deserialized object. +/// +/// This is done instead of using the [`Json`](axum::extract::Json) extractor because we want to +/// log the raw inputs upon failure. +/// +/// # Generics +/// - `T`: The thing to deserialize to. +/// +/// # Arguments +/// - `request`: The [`Request`] to download and turn into JSON. +/// +/// # Returns +/// A parsed `T`. +/// +/// # Errors +/// This function errors if we failed to download the request body, or it was not valid JSON. 
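+///
+/// # Example
+/// A minimal usage sketch, mirroring how the `check_*` handlers below call this helper
+/// (the `MyRequest` type is hypothetical, purely for illustration):
+/// ```ignore
+/// async fn handler(request: Request) -> (StatusCode, String) {
+///     let req: MyRequest = match download_request(request).await {
+///         Ok(req) => req,
+///         Err(res) => return res,
+///     };
+///     (StatusCode::OK, "parsed the request".into())
+/// }
+/// ```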
+async fn download_request<T: DeserializeOwned>(request: Request) -> Result<T, (StatusCode, String)> {
+    // Download the entire request first
+    let mut req: Vec<u8> = Vec::new();
+    let mut request = request.into_body().into_data_stream();
+    while let Some(next) = request.next().await {
+        // Unwrap the chunk
+        let next: Bytes = match next {
+            Ok(next) => next,
+            Err(err) => {
+                let msg: &'static str = "Failed to download request body";
+                error!("{}", trace!(("{msg}"), err));
+                return Err((StatusCode::INTERNAL_SERVER_ERROR, msg.into()));
+            },
+        };
+
+        // Append it
+        req.extend(next);
+    }
+
+    // Deserialize the request contents
+    match serde_json::from_slice(&req) {
+        Ok(req) => Ok(req),
+        Err(err) => {
+            let error: String = format!(
+                "{}Raw body:\n{}\n{}\n{}\n",
+                trace!(("Failed to deserialize request body"), err),
+                (0..80).map(|_| '-').collect::<String>(),
+                String::from_utf8_lossy(&req),
+                (0..80).map(|_| '-').collect::<String>()
+            );
+            info!("{error}");
+            Err((StatusCode::BAD_REQUEST, error))
+        },
+    }
+}
+
+
+
+
+
+/***** LIBRARY *****/
+/// Defines a Brane-compliant deliberation API server.
+pub struct Deliberation<S, P, L> {
+    /// The address on which to bind the server.
+    addr: SocketAddr,
+    /// The auth resolver for resolving auth.
+    auth: JwkResolver<KidResolver>,
+    /// The store for accessing the backend database.
+    store: Arc<SQLiteDatabase<String>>,
+    /// The state resolver for resolving state.
+    resolver: S,
+    /// The reasoner connector for connecting to reasoners.
+    reasoner: Arc<P>,
+    /// The logger for logging!
+    logger: L,
+}
+impl<S, P, L> Deliberation<S, P, L> {
+    /// Constructor for the Deliberation.
+    ///
+    /// # Arguments
+    /// - `addr`: The address on which to listen once [`serve()`](Deliberation::serve())ing.
+    /// - `keystore_path`: The path to the keystore file that maps KIDs to the key used for
+    ///   encrypting/decrypting login JWTs.
+    /// - `store`: A shared ownership of the [`SQLiteDatabase`] that we use for accessing policies.
+    /// - `resolver`: The [`StateResolver`] used to resolve the state in the given requests.
+    /// - `reasoner`: The [`ReasonerConnector`] used to interact with the backend reasoner.
+    /// - `logger`: The [`AuditLogger`] that will log what the reasoner is doing.
+    ///
+    /// # Returns
+    /// A new Deliberation, ready to handle requests or something.
+    #[inline]
+    pub fn new(
+        addr: impl Into<SocketAddr>,
+        keystore_path: impl AsRef<Path>,
+        store: Arc<SQLiteDatabase<String>>,
+        resolver: S,
+        reasoner: Arc<P>,
+        logger: L,
+    ) -> Result<Self, Error> {
+        // Attempt to create the KidResolver
+        let kid = match KidResolver::new(keystore_path) {
+            Ok(res) => res,
+            Err(err) => return Err(Error::KidResolver { err }),
+        };
+
+        // If that worked, get kicking
+        Ok(Self { addr: addr.into(), auth: JwkResolver::new(INITIATOR_CLAIM, kid), store, resolver, reasoner, logger })
+    }
+}
+
+// Paths
+impl<S, P, L> Deliberation<S, P, L>
+where
+    S: 'static + Send + Sync + StateResolver,
+    S::Error: HttpError,
+    P: 'static + Send + Sync + ReasonerConnector,
+    P::Reason: Serialize,
+    L: Send + Sync + AuditLogger,
+{
+    /// Helper function for handling all three endpoints after the question has been decided.
+    ///
+    /// # Arguments
+    /// - `this`: `self` but in an [`Arc`].
+    /// - `reference`: The reference for which this request is being done.
+    /// - `input`: The [`Input`] that will be resolved to the reasoner input.
+    ///
+    /// # Returns
+    /// The status code of the response and a message to attach to it.
+    async fn check(this: Arc<Self>, reference: &str, input: Input) -> (StatusCode, String) {
+        // Build the state, then resolve it
+        let (state, question): (P::State, P::Question) = match this.resolver.resolve(input, &SessionedAuditLogger::new(reference, &this.logger)).await
+        {
+            Ok(state) => state,
+            Err(err) => {
+                let status = err.status_code();
+                let err = Trace::from_source("Failed to resolve input to the reasoner", err);
+                error!("{}", err.trace());
+                return (status, err.to_string());
+            },
+        };
+
+        // With that in order, hit the reasoner
+        match this.reasoner.consult(state, question, &SessionedAuditLogger::new(reference, &this.logger)).await {
+            Ok(res) => {
+                // Serialize the response
+                let res: String = match serde_json::to_string(&CheckResponse { verdict: res }) {
+                    Ok(res) => res,
+                    Err(err) => {
+                        let err = Trace::from_source("Failed to serialize reasoner response", err);
+                        error!("{}", err.trace());
+                        return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string());
+                    },
+                };
+
+                // OK
+                (StatusCode::OK, res)
+            },
+            Err(err) => {
+                let err = Trace::from_source("Failed to consult with the reasoner", err);
+                error!("{}", err.trace());
+                (StatusCode::INTERNAL_SERVER_ERROR, err.to_string())
+            },
+        }
+    }
+
+    /// Authorization middle layer for the Deliberation.
+    ///
+    /// This will read the `Authorization` header in the incoming request for a token that
+    /// identifies the user. The request will be interrupted if the token is missing, invalid or
+    /// not (properly) signed.
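+    ///
+    /// # Example
+    /// A sketch of the kind of header this layer expects (the exact scheme and token encoding
+    /// are determined by the configured [`JwkResolver`]; the value shown is purely illustrative):
+    /// ```text
+    /// Authorization: <JWT signed with a keystore key, carrying a "username" claim>
+    /// ```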
+    async fn authorize(State(context): State<Arc<Self>>, ConnectInfo(client): ConnectInfo<SocketAddr>, mut request: Request, next: Next) -> Response {
+        let _span = span!(Level::INFO, "Deliberation::authorize", client = client.to_string());
+
+        // Do the auth thingy
+        let user: User = match context.auth.authorize(request.headers()).await {
+            Ok(Ok(user)) => user,
+            Ok(Err(err)) => {
+                let status = err.status_code();
+                let err = Trace::from_source("Failed to authorize incoming request", err);
+                info!("{}", err.trace());
+                let mut res =
+                    Response::new(Body::from(serde_json::to_string(&err.freeze()).unwrap_or_else(|err| panic!("Failed to serialize Trace: {err}"))));
+                *res.status_mut() = status;
+                return res;
+            },
+            Err(err) => {
+                let err = Trace::from_source("Failed to authorize incoming request", err);
+                error!("{}", err.trace());
+                let mut res = Response::new(Body::from(err.to_string()));
+                *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
+                return res;
+            },
+        };
+
+        // If we found a context, then inject it in the request as an extension; then continue
+        request.extensions_mut().insert(user);
+        next.run(request).await
+    }
+
+    /// Handler for `GET /v2/workflow` (i.e., checking a whole workflow).
+    ///
+    /// In:
+    /// - [`CheckWorkflowRequest`].
+    ///
+    /// Out:
+    /// - 200 OK with a [`CheckResponse`] detailing the verdict of the reasoner;
+    /// - 400 BAD REQUEST with the reason why we failed to parse the request;
+    /// - 404 NOT FOUND if the given use-case was unknown; or
+    /// - 500 INTERNAL SERVER ERROR with a message describing what went wrong.
+    fn check_workflow(
+        State(this): State<Arc<Self>>,
+        Extension(auth): Extension<User>,
+        request: Request,
+    ) -> impl Send + Future<Output = (StatusCode, String)> {
+        let reference: Arc<String> =
+            Arc::new(format!("{}-{}", auth.id, rand::thread_rng().sample_iter(Alphanumeric).take(8).map(char::from).collect::<String>()));
+        let span_ref: Arc<String> = reference.clone();
+        async move {
+            // Get the request
+            let req: CheckWorkflowRequest = match download_request(request).await {
+                Ok(req) => req,
+                Err(res) => return res,
+            };
+
+            // Decide the input
+            let input: Input =
+                Input { store: this.store.clone(), usecase: req.usecase, workflow: req.workflow, input: QuestionInput::ValidateWorkflow };
+
+            // Continue with the agnostic function for maintainability
+            Self::check(this, reference.as_str(), input).await
+        }
+        .instrument(span!(Level::INFO, "Deliberation::check_workflow", user = auth.id, reference = *span_ref))
+    }
+
+    /// Handler for `GET /v2/task` (i.e., checking a task in a workflow).
+    ///
+    /// In:
+    /// - [`CheckTaskRequest`].
+    ///
+    /// Out:
+    /// - 200 OK with a [`CheckResponse`] detailing the verdict of the reasoner;
+    /// - 400 BAD REQUEST with the reason why we failed to parse the request; or
+    /// - 500 INTERNAL SERVER ERROR with a message describing what went wrong.
+    fn check_task(
+        State(this): State<Arc<Self>>,
+        Extension(auth): Extension<User>,
+        request: Request,
+    ) -> impl Send + Future<Output = (StatusCode, String)> {
+        let reference: Arc<String> =
+            Arc::new(format!("{}-{}", auth.id, rand::thread_rng().sample_iter(Alphanumeric).take(8).map(char::from).collect::<String>()));
+        let span_ref: Arc<String> = reference.clone();
+        async move {
+            // Get the request
+            let req: CheckTaskRequest = match download_request(request).await {
+                Ok(req) => req,
+                Err(res) => return res,
+            };
+
+            // Decide the input
+            let task_id: String = pc_to_id(&req.workflow, req.task);
+            let input: Input = Input {
+                store: this.store.clone(),
+                usecase: req.usecase,
+                workflow: req.workflow,
+                input: QuestionInput::ExecuteTask { task: task_id },
+            };
+
+            // Continue with the agnostic function for maintainability
+            Self::check(this, reference.as_str(), input).await
+        }
+        .instrument(span!(Level::INFO, "Deliberation::check_task", user = auth.id, reference = *span_ref))
+    }
+
+    /// Handler for `GET /v2/transfer` (i.e., checking a transfer for a task in a workflow).
+    ///
+    /// In:
+    /// - [`CheckTransferRequest`].
+    ///
+    /// Out:
+    /// - 200 OK with a [`CheckResponse`] detailing the verdict of the reasoner;
+    /// - 400 BAD REQUEST with the reason why we failed to parse the request; or
+    /// - 500 INTERNAL SERVER ERROR with a message describing what went wrong.
+    fn check_transfer(
+        State(this): State<Arc<Self>>,
+        Extension(auth): Extension<User>,
+        request: Request,
+    ) -> impl Send + Future<Output = (StatusCode, String)> {
+        let reference: Arc<String> =
+            Arc::new(format!("{}-{}", auth.id, rand::thread_rng().sample_iter(Alphanumeric).take(8).map(char::from).collect::<String>()));
+        let span_ref: Arc<String> = reference.clone();
+        async move {
+            // Get the request
+            let req: CheckTransferRequest = match download_request(request).await {
+                Ok(req) => req,
+                Err(res) => return res,
+            };
+
+            // Decide the input
+            let input: Input = if let Some(task) = req.task {
+                let task_id: String = pc_to_id(&req.workflow, task);
+                Input {
+                    store: this.store.clone(),
+                    usecase: req.usecase,
+                    workflow: req.workflow,
+                    input: QuestionInput::TransferInput { task: task_id, input: req.input },
+                }
+            } else {
+                Input {
+                    store: this.store.clone(),
+                    usecase: req.usecase,
+                    workflow: req.workflow,
+                    input: QuestionInput::TransferResult { result: req.input },
+                }
+            };
+
+            // Continue with the agnostic function for maintainability
+            Self::check(this, reference.as_str(), input).await
+        }
+        .instrument(span!(Level::INFO, "Deliberation::check_transfer", user = auth.id, reference = *span_ref))
+    }
+}
+
+// Serve
+impl<S, P, L> Deliberation<S, P, L>
+where
+    S: 'static + Send + Sync + StateResolver,
+    S::Error: HttpError,
+    P: 'static + Send + Sync + ReasonerConnector,
+    P::Reason: Serialize,
+    L: 'static + Send + Sync + AuditLogger,
+{
+    /// Runs this server.
+    ///
+    /// This will hijack the current codeflow and keep serving the server until the end of the
+    /// universe! ...or until the server quits.
+    ///
+    /// In case of the latter, the thread just returns.
+    ///
+    /// # Errors
+    /// This function may error if the server fails to listen or if a fatal server error comes
+    /// along as it serves. However, client-side errors should not trigger errors at this level.
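+    ///
+    /// # Example
+    /// A minimal sketch of hosting the API, mirroring how `main.rs` drives it:
+    /// ```ignore
+    /// if let Err(err) = delib.serve().await {
+    ///     eprintln!("Failed to host deliberation API: {err}");
+    /// }
+    /// ```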
+ pub fn serve(self) -> impl Future> { + let this: Arc = Arc::new(self); + async move { + let span = span!(Level::INFO, "Deliberation::serve", state = "starting", client = Empty); + + // First, define the axum paths + debug!("Building axum paths..."); + let check_workflow: Router = Router::new() + .route(CHECK_WORKFLOW_PATH.path, on(CHECK_WORKFLOW_PATH.method.try_into().unwrap(), Self::check_workflow)) + .layer(axum::middleware::from_fn_with_state(this.clone(), Self::authorize)) + .with_state(this.clone()); + let check_task: Router = Router::new() + .route(CHECK_TASK_PATH.path, on(CHECK_TASK_PATH.method.try_into().unwrap(), Self::check_task)) + .layer(axum::middleware::from_fn_with_state(this.clone(), Self::authorize)) + .with_state(this.clone()); + let check_transfer: Router = Router::new() + .route(CHECK_TRANSFER_PATH.path, on(CHECK_TRANSFER_PATH.method.try_into().unwrap(), Self::check_transfer)) + .layer(axum::middleware::from_fn_with_state(this.clone(), Self::authorize)) + .with_state(this.clone()); + let router: IntoMakeServiceWithConnectInfo = + Router::new().nest("/", check_workflow).nest("/", check_task).nest("/", check_transfer).into_make_service_with_connect_info(); + + // Bind the TCP Listener + debug!("Binding server on '{}'...", this.addr); + let listener: TcpListener = match TcpListener::bind(this.addr).await { + Ok(listener) => listener, + Err(err) => return Err(Error::ListenerBind { addr: this.addr, err }), + }; + + // Accept new connections! + info!("Initialization OK, awaiting connections..."); + span.record("state", "running"); + loop { + // Accept a new connection + let (socket, remote_addr): (TcpStream, SocketAddr) = match listener.accept().await { + Ok(res) => res, + Err(err) => { + error!("{}", trace!(("Failed to accept incoming connection"), err)); + continue; + }, + }; + span.record("client", remote_addr.to_string()); + + // Move the rest to a separate task + let router: IntoMakeServiceWithConnectInfo<_, _> = router.clone(); + tokio::spawn(async move { + debug!("Handling incoming connection from '{remote_addr}'"); + + // Build the service + let service = hyper::service::service_fn(|request: Request| { + // Sadly, we must `move` again because this service could be called multiple times (at least according to the typesystem) + let mut router = router.clone(); + async move { + // SAFETY: We can call `unwrap()` because the call returns an infallible. + router.call(remote_addr).await.unwrap().call(request).await + } + }); + + // Create a service that handles this for us + let socket: TokioIo<_> = TokioIo::new(socket); + if let Err(err) = HyperBuilder::new(TokioExecutor::new()).serve_connection_with_upgrades(socket, service).await { + error!("{}", trace!(("Failed to serve incoming connection"), *err)); + } + }); + } + } + } +} diff --git a/brane-chk/src/apis/mod.rs b/brane-chk/src/apis/mod.rs new file mode 100644 index 00000000..dfdbd936 --- /dev/null +++ b/brane-chk/src/apis/mod.rs @@ -0,0 +1,21 @@ +// MOD.rs +// by Lut99 +// +// Created: +// 02 Dec 2024, 13:58:11 +// Last edited: +// 02 Dec 2024, 15:25:04 +// Auto updated? +// Yes +// +// Description: +//! Defines the `brane-chk` unique APIs that are implemented. 
+// + +// Declare the modules +pub mod deliberation; +pub mod reasoner; + +// Use some of it into this module's namespace +pub use deliberation::Deliberation; +pub use reasoner::inject_reasoner_api; diff --git a/brane-chk/src/apis/reasoner.rs b/brane-chk/src/apis/reasoner.rs new file mode 100644 index 00000000..47490522 --- /dev/null +++ b/brane-chk/src/apis/reasoner.rs @@ -0,0 +1,110 @@ +// REASONER.rs +// by Lut99 +// +// Created: +// 02 Dec 2024, 14:00:06 +// Last edited: +// 29 Apr 2025, 23:33:56 +// Auto updated? +// Yes +// +// Description: +//! Implements an API for getting non-"public" (deliberation) +//! information that is beyond the store API. +// + +use std::future::Future; +use std::net::SocketAddr; +use std::sync::Arc; + +use axum::extract::State; +use axum::routing::get; +use axum::{Extension, Router}; +use error_trace::{ErrorTrace as _, Trace}; +use hyper::StatusCode; +use policy_reasoner::spec::ReasonerConnector; +use policy_store::servers::axum::AxumServer; +use policy_store::spec::AuthResolver; +use policy_store::spec::metadata::User; +use specifications::checking::store::{EFlintHaskellReasonerWithInterfaceContext, GetContextResponse}; +use thiserror::Error; +use tracing::{Instrument as _, Level, debug, error, span}; + + +/***** ERRORS *****/ +/// Defines the errors originating in the [`Reasoner`] API. +#[derive(Debug, Error)] +pub enum Error { + #[error("Failed to create the KID resolver")] + KidResolver { + #[source] + err: policy_store::auth::jwk::keyresolver::kid::ServerError, + }, + #[error("Failed to bind server on address '{addr}'")] + ListenerBind { + addr: SocketAddr, + #[source] + err: std::io::Error, + }, +} + + + + + +/***** LIBRARY *****/ +/// Handler for `GET /v2/context` (i.e., retrieving reasoner context). +/// +/// Out: +/// - 200 OK with a [`ContextResponse`] detailling the relevant reasoner information; or +/// - 500 INTERNAL SERVER ERROR with a message what went wrong. +pub fn get_context(State(this): State>, Extension(auth): Extension) -> impl Send + Future +where + R: Send + Sync + ReasonerConnector, +{ + async move { + // Generate the context + let res: GetContextResponse = GetContextResponse { context: this.context() }; + + // Serialize and send back + match serde_json::to_string(&res) { + Ok(res) => (StatusCode::OK, res), + Err(err) => { + let err = Trace::from_source("Failed to serialize context", err); + error!("{}", err.trace()); + (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()) + }, + } + } + .instrument(span!(Level::INFO, "Reasoner::get_context", user = auth.id)) +} + +/// Given a [`Router`], injects the [`get_context()`]-path into it. +/// +/// # Arguments +/// - `server`: The already existing [`AxumServer`] that is also the state to give to the +/// auth function. +/// - `reasoner`: Some [`ReasonerConnector`] that can provide us with the context to provide. +/// - `router`: A [`Router`] to inject with the path. +/// +/// # Returns +/// A new [`Router`] that is the same but with the new path in it. 
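+///
+/// # Example
+/// How `main.rs` wires this into the store API's routes:
+/// ```ignore
+/// let paths: Router<()> = inject_reasoner_api(store.clone(), reasoner, AxumServer::routes(store.clone()));
+/// ```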
+pub fn inject_reasoner_api(server: Arc>, reasoner: Arc, router: Router<()>) -> Router<()> +where + A: 'static + Send + Sync + AuthResolver, + A::Context: 'static + Send + Sync + Clone, + A::ClientError: 'static, + A::ServerError: 'static, + D: 'static + Send + Sync, + R: 'static + Send + Sync + ReasonerConnector, +{ + let _span = span!(Level::INFO, "inject_reasoner_api()"); + + // First, define the axum paths + debug!("Injecting additional axum paths..."); + let get_context: Router = Router::new() + .route("/context", get(get_context::)) + .layer(axum::middleware::from_fn_with_state(server, policy_store::servers::axum::AxumServer::check)) + .with_state(reasoner.clone()); + router.nest("/v2/", get_context) +} diff --git a/brane-chk/src/lib.rs b/brane-chk/src/lib.rs new file mode 100644 index 00000000..6d0d66f1 --- /dev/null +++ b/brane-chk/src/lib.rs @@ -0,0 +1,22 @@ +// LIB.rs +// by Lut99 +// +// Created: +// 17 Oct 2024, 16:07:19 +// Last edited: +// 29 Apr 2025, 13:52:43 +// Auto updated? +// Yes +// +// Description: +//! The checker is the entity in the Brane system that is responsible +//! for consulting a backend reasoner. In XACML terms, it might be +//! called a Policy Decision Point (PDP). +// + +// Declare modules +pub mod apis; +pub mod question; +pub mod reasonerconn; +pub mod stateresolver; +pub mod workflow; diff --git a/brane-chk/src/main.rs b/brane-chk/src/main.rs new file mode 100644 index 00000000..18f05334 --- /dev/null +++ b/brane-chk/src/main.rs @@ -0,0 +1,204 @@ +// MAIN.rs +// by Lut99 +// +// Created: +// 17 Oct 2024, 16:13:06 +// Last edited: +// 01 May 2025, 16:24:27 +// Auto updated? +// Yes +// +// Description: +//! The checker is the entity in the Brane system that is responsible +//! for consulting a backend reasoner. In XACML terms, it might be +//! called a Policy Decision Point (PDP). +// + +// Declare modules +pub mod apis; +pub mod question; +pub mod reasonerconn; +pub mod stateresolver; +pub mod workflow; + +use std::borrow::Cow; +// Imports +use std::net::SocketAddr; +use std::path::PathBuf; +use std::sync::Arc; + +use axum::Router; +use brane_cfg::info::Info; +use brane_cfg::node::{NodeConfig, NodeSpecificConfig, WorkerConfig}; +use brane_chk::apis::{Deliberation, inject_reasoner_api}; +use brane_chk::reasonerconn::EFlintHaskellReasonerConnectorWithInterface; +use brane_chk::stateresolver::BraneStateResolver; +use clap::Parser; +use enum_debug::EnumDebug as _; +use error_trace::trace; +use policy_reasoner::loggers::file::FileLogger; +use policy_reasoner::reasoners::eflint_haskell::reasons::PrefixedHandler; +use policy_reasoner::spec::reasonerconn::ReasonerConnector as _; +use policy_store::auth::jwk::JwkResolver; +use policy_store::auth::jwk::keyresolver::KidResolver; +use policy_store::databases::sqlite::SQLiteDatabase; +use policy_store::servers::axum::AxumServer; +use tracing::{Level, error, info}; + + +/***** ARGUMENTS *****/ +#[derive(Debug, Parser)] +struct Arguments { + /// Whether to enable TRACE-level debug statements. + #[clap(long)] + trace: bool, + + /// Node config store. + #[clap( + short = 'n', + long, + default_value = "./node.yml", + help = "The path to the node environment configuration. For the checker, this ONLY defines the usecase mapping. The rest is given directly \ + as arguments (but probably via `branectl`).", + env = "NODE_CONFIG_PATH" + )] + node_config_path: PathBuf, + + /// The address of the deliberation API on which to serve. 
+ #[clap(short = 'a', long, default_value = "127.0.0.1:50053", env = "DELIB_ADDRESS")] + delib_addr: SocketAddr, + /// The address of the store API on which to serve. + #[clap(short = 'A', long, default_value = "127.0.0.1:50054", env = "STORE_ADDRESS")] + store_addr: SocketAddr, + + /// The path to the deliberation API keystore. + #[clap(short = 'k', long, default_value = "./delib_keys.json", env = "POLICY_DELIB_KEYS_PATH")] + delib_keys: PathBuf, + /// The path to the store API keystore. + #[clap(short = 'K', long, default_value = "./store_keys.json", env = "POLICY_STORE_KEYS_PATH")] + store_keys: PathBuf, + + /// The path to the output log file. + #[clap(short = 'l', long, default_value = "./checker.log", env = "LOG_PATH")] + log_path: PathBuf, + /// The path to the database file. + #[clap(short = 'd', long, default_value = "./policies.db", env = "POLICY_DB_PATH")] + database_path: PathBuf, + /// The command of the eFLINT REPL to spawn. + #[clap(short = 'b', long, default_value = "eflint-repl")] + backend_cmd: String, + /// The path to the base policy file to load. This is prefixed to every question and runtime context. + #[clap(short = 'p', long, default_value = "./policy.eflint", env = "POLICY_FILE")] + policy: PathBuf, + /// Any prefix that, when given, reveals certain violations. + #[clap(short = 'P', long, default_value = "pub-", env = "POLICY_PREFIX")] + prefix: String, +} + + + + + +/***** ENTRYPOINT *****/ +#[tokio::main(flavor = "multi_thread")] +async fn main() { + // Parse the arguments + let args = Arguments::parse(); + + // Setup the logger + tracing_subscriber::fmt().with_max_level(if args.trace { Level::TRACE } else { Level::DEBUG }).init(); + info!("{} - v{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")); + + + /* Step 1: Prepare the servers */ + // Read the node YAML file. 
+    let node: WorkerConfig = match NodeConfig::from_path_async(&args.node_config_path).await {
+        Ok(node) => match node.node {
+            NodeSpecificConfig::Worker(cfg) => cfg,
+            other => {
+                error!("Found node.yml for a {}, expected a Worker", other.variant());
+                std::process::exit(1);
+            },
+        },
+        Err(err) => {
+            error!("{}", trace!(("Failed to load node config file '{}'", args.node_config_path.display()), err));
+            std::process::exit(1);
+        },
+    };
+
+    // Setup the logger
+    let logger: FileLogger = FileLogger::new(format!("{} - v{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")), args.log_path);
+
+    // Setup the reasoner connector
+    let reasoner: Arc<EFlintHaskellReasonerConnectorWithInterface> = match EFlintHaskellReasonerConnectorWithInterface::new_async(
+        shlex::split(&args.backend_cmd).unwrap_or_else(|| vec![args.backend_cmd]),
+        args.policy,
+        PrefixedHandler::new(Cow::Owned(args.prefix)),
+        &logger,
+    )
+    .await
+    {
+        Ok(reasoner) => Arc::new(reasoner),
+        Err(err) => {
+            error!("{}", trace!(("Failed to create EFlintHaskellReasonerConnectorWithInterface"), err));
+            std::process::exit(1);
+        },
+    };
+
+    // Setup the state resolver
+    let resolver: BraneStateResolver = BraneStateResolver::new(node.usecases, &reasoner.reasoner.context().base_policy_hash);
+
+    // Setup the database connection
+    let conn: Arc<SQLiteDatabase<String>> = match SQLiteDatabase::new_async(&args.database_path, policy_store::databases::sqlite::MIGRATIONS).await {
+        Ok(conn) => Arc::new(conn),
+        Err(err) => {
+            error!("{}", trace!(("Failed to setup connection to SQLiteDatabase '{}'", args.database_path.display()), err));
+            std::process::exit(1);
+        },
+    };
+
+
+
+    /* Step 2: Setup the deliberation & store APIs */
+    // Deliberation
+    let delib: Deliberation<_, _, _> = match Deliberation::new(args.delib_addr, &args.delib_keys, conn.clone(), resolver, reasoner.clone(), logger) {
+        Ok(server) => server,
+        Err(err) => {
+            error!("{}", trace!(("Failed to create deliberation API server"), err));
+            std::process::exit(1);
+        },
+    };
+
+    // Store
+    let resolver: KidResolver = match KidResolver::new(&args.store_keys) {
+        Ok(resolver) => resolver,
+        Err(err) => {
+            error!("{}", trace!(("Failed to create KidResolver with file {:?}", args.store_keys.display()), err));
+            std::process::exit(1);
+        },
+    };
+    let store: Arc<AxumServer<JwkResolver<KidResolver>, SQLiteDatabase<String>>> =
+        Arc::new(AxumServer::new(args.store_addr, JwkResolver::new("username", resolver), conn));
+
+    // Also inject the reasoner context endpoint
+    let paths: Router<()> = inject_reasoner_api(store.clone(), reasoner, AxumServer::routes(store.clone()));
+
+
+
+    /* Step 3: Host them concurrently */
+    tokio::select! {
+        res = delib.serve() => match res {
+            Ok(_) => info!("Terminated."),
+            Err(err) => {
+                error!("{}", trace!(("Failed to host deliberation API"), err));
+                std::process::exit(1);
+            }
+        },
+        res = AxumServer::serve_router(store, paths) => match res {
+            Ok(_) => info!("Terminated."),
+            Err(err) => {
+                error!("{}", trace!(("Failed to host store API"), err));
+                std::process::exit(1);
+            }
+        },
+    }
+}
diff --git a/brane-chk/src/question.rs b/brane-chk/src/question.rs
new file mode 100644
index 00000000..a560afe3
--- /dev/null
+++ b/brane-chk/src/question.rs
@@ -0,0 +1,88 @@
+// QUESTION.rs
+//   by Lut99
+//
+// Created:
+//   17 Oct 2024, 16:10:59
+// Last edited:
+//   02 May 2025, 14:59:38
+// Auto updated?
+//   Yes
+//
+// Description:
+//!   Defines the questions that the Brane checker can pose to its reasoner.
+// + +use std::fmt::{Formatter, Result as FResult}; + +use policy_reasoner::reasoners::eflint_haskell::spec::EFlintable; +use policy_reasoner::workflow::{Entity, Workflow}; +use serde::{Deserialize, Serialize}; + +use crate::workflow; + + +/***** LIBRARY *****/ +/// Defines the question (=request specific input) for the Brane reasoner. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub enum Question { + /// Checks if this domain agrees with the workflow as a whole. + ValidateWorkflow { + /// The workflow that we want to validate. + workflow: Workflow, + }, + /// Checks if this domain agrees with executing the given task in the given workflow. + ExecuteTask { + /// The workflow that we want to validate. + workflow: Workflow, + /// The task that we specifically want to validate within that workflow. + task: String, + }, + /// Checks if this domain agrees with providing the given input to the given task in the given workflow. + TransferInput { + /// The workflow that we want to validate. + workflow: Workflow, + /// The task that we specifically want to validate within that workflow. + task: String, + /// The input to that task that we want to validate. + input: String, + }, + /// Checks if a domain agrees with providing the given result to the end user of the workflow. + TransferResult { + /// The workflow that we want to validate. + workflow: Workflow, + /// The input to that task that we want to validate. + result: String, + }, +} +impl EFlintable for Question { + #[inline] + fn eflint_fmt(&self, f: &mut Formatter) -> FResult { + match self { + Self::ValidateWorkflow { workflow } => { + workflow::eflint_fmt(workflow, f)?; + writeln!(f, "?workflow-to-execute(workflow({:?}))", workflow.id)?; + Ok(()) + }, + Self::ExecuteTask { workflow, task } => { + workflow::eflint_fmt(workflow, f)?; + writeln!(f, "?task-to-execute(task(node(workflow({:?}), {:?})))", workflow.id, task)?; + Ok(()) + }, + Self::TransferInput { workflow, task, input } => { + workflow::eflint_fmt(workflow, f)?; + writeln!(f, "?dataset-to-transfer(node-input(node(workflow({:?}), {:?}), asset({:?})))", workflow.id, task, input)?; + Ok(()) + }, + Self::TransferResult { workflow, result } => { + workflow::eflint_fmt(workflow, f)?; + let user: &Entity = workflow.user.as_ref().unwrap_or_else(|| panic!("Cannot ask for a transfer result without a user in workflow")); + writeln!( + f, + "?result-to-transfer(workflow-result-recipient(workflow-result(workflow({:?}), asset({:?})), user({:?})))", + workflow.id, result, user.id + )?; + Ok(()) + }, + } + } +} diff --git a/brane-chk/src/reasonerconn.rs b/brane-chk/src/reasonerconn.rs new file mode 100644 index 00000000..44feb4a2 --- /dev/null +++ b/brane-chk/src/reasonerconn.rs @@ -0,0 +1,93 @@ +// REASONERCONN.rs +// by Lut99 +// +// Created: +// 02 Dec 2024, 15:35:46 +// Last edited: +// 01 May 2025, 15:30:10 +// Auto updated? +// Yes +// +// Description: +//! Defines a wrapper around an [`EFlintJsonReasonerConnector`] that +//! includes a particular policy interface. 
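+//!
+//!   Concretely, the wrapper holds an [`EFlintHaskellReasonerConnector`] and its
+//!   `consult()` merely appends a newline to the state before delegating to it.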
+// + +use std::future::Future; +use std::path::PathBuf; + +use policy_reasoner::reasoners::eflint_haskell::reasons::{PrefixedHandler, ReasonHandler}; +use policy_reasoner::reasoners::eflint_haskell::{EFlintHaskellReasonerConnector, Error}; +use policy_reasoner::spec::auditlogger::SessionedAuditLogger; +use policy_reasoner::spec::reasonerconn::ReasonerResponse; +use policy_reasoner::spec::{AuditLogger, ReasonerConnector}; +use specifications::checking::store::EFlintHaskellReasonerWithInterfaceContext; +use tracing::{Instrument as _, Level, span}; + +use crate::question::Question; + + +/***** LIBRARY *****/ +/// Wrapper of a [`EFlintHaskellReasonerConnector`] that includes a bit of default interface policy. +#[derive(Clone, Debug)] +pub struct EFlintHaskellReasonerConnectorWithInterface { + /// The actual reasoner. + pub reasoner: EFlintHaskellReasonerConnector, String, Question>, +} +impl EFlintHaskellReasonerConnectorWithInterface { + /// Constructor for the EFlintHaskellReasonerConnectorWithInterface. + /// + /// This constructor logs asynchronously. + /// + /// # Arguments + /// - `cmd`: The command with which to execute the backend `eflint-repl` binary. + /// - `base_policy_path`: A path to an eFLINT file containing the base policy to load. + /// - `handler`: The [`ReasonHandler`] that determines how errors from the reasoners are propagated to the user. + /// - `logger`: A logger to write this reasoner's context to. + /// + /// # Returns + /// A new instance of Self, ready for reasoning. + /// + /// # Errors + /// This function may error if it failed to log to the given `logger`. + /// + /// # Panics + /// This function uses the embedded, compiled eFLINT base code (see the `policy`-directory in + /// its manifest directory). Building the reasoner will trigger the first load, if any, + /// and this may panic if the input is somehow ill-formed. + #[inline] + pub fn new_async<'l, L: AuditLogger>( + cmd: impl 'l + IntoIterator, + base_policy_path: impl 'l + Into, + handler: PrefixedHandler<'static>, + logger: &'l L, + ) -> impl 'l + Future> { + async move { Ok(Self { reasoner: EFlintHaskellReasonerConnector::new_async(cmd, base_policy_path, handler, logger).await? }) } + } +} +impl ReasonerConnector for EFlintHaskellReasonerConnectorWithInterface { + type Context = EFlintHaskellReasonerWithInterfaceContext; + type Error = Error; + type Question = Question; + type Reason = as ReasonHandler>::Reason; + type State = String; + + fn context(&self) -> Self::Context { self.reasoner.context() } + + fn consult<'a, L>( + &'a self, + mut state: Self::State, + question: Self::Question, + logger: &'a SessionedAuditLogger, + ) -> impl 'a + Send + Future, Self::Error>> + where + L: Sync + AuditLogger, + { + async move { + // Then run the normal one + state.push('\n'); + self.reasoner.consult(state, question, logger).await + } + .instrument(span!(Level::INFO, "EFlintJsonReasonerConnectorWithInterface::consult", reference = logger.reference())) + } +} diff --git a/brane-chk/src/state.rs b/brane-chk/src/state.rs new file mode 100644 index 00000000..7ece771f --- /dev/null +++ b/brane-chk/src/state.rs @@ -0,0 +1,39 @@ +// STATE.rs +// by Lut99 +// +// Created: +// 17 Oct 2024, 16:10:59 +// Last edited: +// 17 Oct 2024, 16:26:51 +// Auto updated? +// Yes +// +// Description: +//! Defines the Brane's checker's state. +// + +use std::collections::HashMap; + +use policy_reasoner::workflow::Entity; + + +/***** LIBRARY *****/ +/// Defines the state (=request independent input) for the Brane reasoner. 
+#[derive(Clone, Debug)] +pub struct State { + /// A list of where datasets mentioned in a workflow are currently residing. + pub datasets: HashMap, +} + + + +/// Defines the question (=request specific input) for the Brane reasoner. +#[derive(Clone, Copy, Debug)] +pub enum Question { + /// Checks if this domain agrees with the workflow as a whole. + ValidateWorkflow, + /// Checks if this domain agrees with executing the given task in the given workflow. + ExecuteTask, + /// Checks if this domain agrees with providing the given input to the given task in the given workflow. + TransferInput, +} diff --git a/brane-chk/src/stateresolver.rs b/brane-chk/src/stateresolver.rs new file mode 100644 index 00000000..8185f375 --- /dev/null +++ b/brane-chk/src/stateresolver.rs @@ -0,0 +1,821 @@ +// STATERESOLVER.rs +// by Lut99 +// +// Created: +// 17 Oct 2024, 16:09:36 +// Last edited: +// 02 May 2025, 14:59:31 +// Auto updated? +// Yes +// +// Description: +//! Implements the Brane-specific state resolver. +// + +// use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; +use std::future::Future; +// use std::str::FromStr as _; +use std::sync::{Arc, LazyLock}; + +use brane_cfg::node::WorkerUsecase; +use policy_reasoner::spec::auditlogger::{AuditLogger, SessionedAuditLogger}; +use policy_reasoner::spec::stateresolver::StateResolver; +use policy_reasoner::workflow::visitor::Visitor; +use policy_reasoner::workflow::{Elem, ElemCall, Workflow}; +use policy_store::databases::sqlite::{SQLiteConnection, SQLiteDatabase}; +use policy_store::spec::authresolver::HttpError; +use policy_store::spec::databaseconn::{DatabaseConnection as _, DatabaseConnector as _}; +use policy_store::spec::metadata::{Metadata, User}; +// use reqwest::{Response, StatusCode}; +use reqwest::StatusCode; +// use serde::de::DeserializeOwned; +use specifications::address::Address; +// use specifications::data::DataInfo; +// use specifications::package::PackageIndex; +use specifications::version::Version; +use thiserror::Error; +use tracing::{Level, debug, span, warn}; + +use crate::question::Question; +use crate::workflow::compile; + + +/***** STATICS *****/ +/// The user used to represent ourselves in the backend. +static DATABASE_USER: LazyLock = LazyLock::new(|| User { id: "brane".into(), name: "Brane".into() }); + +/// The special policy that is used when the database doesn't mention any active. +static DENY_ALL_POLICY: &'static str = "Invariant contradiction When False."; + + + + + +/***** ERRORS *****/ +#[derive(Debug, Error)] +pub enum Error { + /// The active version in the backend was not suitable for our reasoner. + #[error("Active version {version} is not compatible with reasoner (policy is for {got:?}, but expected for {expected:?})")] + DatabaseActiveVersionMismatch { version: u64, got: String, expected: String }, + /// Failed to connect to the backend database. + #[error("Failed to connect to the backend database as user 'brane'")] + DatabaseConnect { + #[source] + err: policy_store::databases::sqlite::DatabaseError, + }, + /// Failed to get the active version from the backend database. + #[error("Failed to get the active version from the backend database")] + DatabaseGetActiveVersion { + #[source] + err: policy_store::databases::sqlite::ConnectionError, + }, + /// Failed to get the active version from the backend database. 
+ #[error("Failed to get the contents of active version {version} from the backend database")] + DatabaseGetActiveVersionContent { + version: u64, + #[source] + err: policy_store::databases::sqlite::ConnectionError, + }, + /// Failed to get the metadata of the active version from the backend database. + #[error("Failed to get the metadata of active version {version} from the backend database")] + DatabaseGetActiveVersionMetadata { + version: u64, + #[source] + err: policy_store::databases::sqlite::ConnectionError, + }, + /// The active version reported was not found. + #[error("Inconsistent database version: version {version} was reported as the active version, but that version is not found")] + DatabaseInconsistentActive { version: u64 }, + /// Found too many calls with the same ID. + #[error("Given call ID {call:?} occurs multiple times in workflow {workflow:?}")] + DuplicateCallId { workflow: String, call: String }, + /// Found too many inputs in the given call with the same ID. + #[error("Given input ID {input:?} occurs multiple times in the input to call {call:?} in workflow {workflow:?}")] + DuplicateInputId { workflow: String, call: String, input: String }, + /// Found an illegal version string in a task string. + #[error("Illegal version identifier {version:?} in task {task:?} in call {call:?} in workflow {workflow:?}")] + IllegalVersionFormat { + workflow: String, + call: String, + task: String, + version: String, + #[source] + err: specifications::version::ParseError, + }, + /// Failed to get the package index from the remote registry. + #[error("Failed to get package index from the central registry at {addr:?}")] + PackageIndex { addr: String, err: brane_tsk::api::Error }, + /// Failed to send a request to the central registry. + #[error("Failed to send a request to the central registry at {addr:?} to retrieve {what}")] + Request { + what: &'static str, + addr: String, + #[source] + err: reqwest::Error, + }, + /// The server responded with a non-200 OK exit code. + #[error("Central registry at '{addr}' returned {} ({}) when trying to retrieve {what}{}", status.as_u16(), status.canonical_reason().unwrap_or("???"), if let Some(raw) = raw { format!("\n\nRaw response:\n{}\n{}\n{}\n", (0..80).map(|_| '-').collect::(), raw, (0..80).map(|_| '-').collect::()) } else { String::new() })] + RequestFailure { what: &'static str, addr: String, status: StatusCode, raw: Option }, + /// Failed to resolve the data index with the remote Brane API registry. + #[error("Failed to resolve data with remote Brane registry at {addr:?}")] + ResolveData { + addr: Address, + #[source] + err: brane_tsk::api::Error, + }, + /// Failed to resolve the workflow submitted with the request. + #[error("Failed to resolve workflow '{id}'")] + ResolveWorkflow { + id: String, + #[source] + err: crate::workflow::compile::Error, + }, + /// Failed to deserialize the response of the server. + #[error("Failed to deserialize respones of central registry at {addr:?} as {what}")] + ResponseDeserialize { + what: &'static str, + addr: String, + #[source] + err: serde_json::Error, + }, + /// Failed to download the response of the server. + #[error("Failed to download a {what} response from the central registry at {addr:?}")] + ResponseDownload { + what: &'static str, + addr: String, + #[source] + err: reqwest::Error, + }, + /// A given call ID was not found. 
+ #[error("No call {call:?} exists in workflow {workflow:?}")] + UnknownCall { workflow: String, call: String }, + /// The function called on a package in a call was unknown to that package. + #[error("Unknown function {function:?} in package {package:?} ({version}) in call {call:?} in workflow {workflow:?}")] + UnknownFunction { workflow: String, call: String, package: String, version: Version, function: String }, + /// Some input to a task was unknown to us. + #[error("Unknown input {input:?} to call {call:?} in workflow {workflow:?}")] + UnknownInput { workflow: String, call: String, input: String }, + /// A given input ID was not found in the input to a call. + #[error("No input {input:?} exists as input to call {call:?} in workflow {workflow:?}")] + UnknownInputToCall { workflow: String, call: String, input: String }, + /// The planned user that contibutes an input to a task was unknown to us. + #[error("Unknown user {user:?} providing input {input:?} to call {call:?} in workflow {workflow:?}")] + UnknownInputUser { workflow: String, call: String, input: String, user: String }, + /// The user that owns a tag was unknown to us. + #[error("Unknown user {user:?} owning tag {tag:?} of call {call:?} in workflow {workflow:?}")] + UnknownOwnerUser { workflow: String, call: String, tag: String, user: String }, + /// The package extracted from a call was unknown to us. + #[error("Unknown package {package:?} ({version}) in call {call:?} in workflow {workflow:?}")] + UnknownPackage { workflow: String, call: String, package: String, version: Version }, + /// The planned user of a task was unknown to us. + #[error("Unknown planned user {user:?} in call {call:?} in workflow {workflow:?}")] + UnknownPlannedUser { workflow: String, call: String, user: String }, + /// A package in a task did not have the brane format. + #[error("Task {task:?} in call {call:?} in workflow {workflow:?} does not have the Brane format (\"PACKAGE[VERSION]::FUNCTION\")")] + UnknownTaskFormat { workflow: String, call: String, task: String }, + /// The usecase submitted with the request was unknown. + #[error("Unkown usecase '{usecase}'")] + UnknownUsecase { usecase: String }, + /// The workflow user was not found. + #[error("Unknown workflow user {user:?} in workflow {workflow:?}")] + UnknownWorkflowUser { workflow: String, user: String }, + /// The planned user "contributing" an output was not the planned user of the task. + #[error( + "User {output_user:?} providing output {output:?} to call {call:?} in workflow {workflow:?} is not the user planned to do that task \ + ({planned_user:?})" + )] + UnplannedOutputUser { workflow: String, call: String, output: String, planned_user: Option, output_user: Option }, +} +impl HttpError for Error { + #[inline] + fn status_code(&self) -> StatusCode { + use Error::*; + match self { + DatabaseActiveVersionMismatch { .. } + | DatabaseConnect { .. } + | DatabaseGetActiveVersion { .. } + | DatabaseGetActiveVersionContent { .. } + | DatabaseGetActiveVersionMetadata { .. } + | DatabaseInconsistentActive { .. } + | PackageIndex { .. } + | Request { .. } + | RequestFailure { .. } + | ResolveData { .. } + | ResponseDeserialize { .. } + | ResponseDownload { .. } => StatusCode::INTERNAL_SERVER_ERROR, + DuplicateCallId { .. } + | DuplicateInputId { .. } + | IllegalVersionFormat { .. } + | ResolveWorkflow { .. } + | UnknownCall { .. } + | UnknownFunction { .. } + | UnknownInput { .. } + | UnknownInputToCall { .. } + | UnknownInputUser { .. } + | UnknownOwnerUser { .. } + | UnknownPackage { .. 
} + | UnknownPlannedUser { .. } + | UnknownTaskFormat { .. } + | UnknownWorkflowUser { .. } + | UnplannedOutputUser { .. } => StatusCode::BAD_REQUEST, + UnknownUsecase { .. } => StatusCode::NOT_FOUND, + } + } +} + + + + + +/***** HELPER FUNCTIONS *****/ +// /// Sends a GET-request and tries to deserialize the response. +// /// +// /// # Generic arguments +// /// - `R`: The [`Deserialize`]able object to expect in the response. +// /// +// /// # Arguments +// /// - `url`: The path to send a request to. +// /// +// /// # Returns +// /// A parsed `R` if the server replied with 200 OK. +// /// +// /// # Errors +// /// This function errors if we failed to send the request, receive the response or if the server did not 200 OK. +// async fn send_request(url: &str) -> Result { +// // Send the request out +// let res: Response = match reqwest::get(url.to_string()).await { +// Ok(res) => res, +// Err(err) => return Err(Error::Request { what: std::any::type_name::(), addr: url.into(), err }), +// }; +// // Check if the response makes sense +// if !res.status().is_success() { +// return Err(Error::RequestFailure { +// what: std::any::type_name::(), +// addr: url.into(), +// status: res.status(), +// raw: res.text().await.ok(), +// }); +// } + +// // Now attempt to deserialize the response +// let raw: String = match res.text().await { +// Ok(raw) => raw, +// Err(err) => return Err(Error::ResponseDownload { what: std::any::type_name::(), addr: url.into(), err }), +// }; +// let res: R = match serde_json::from_str(&raw) { +// Ok(res) => res, +// Err(err) => return Err(Error::ResponseDeserialize { what: std::any::type_name::(), addr: url.into(), err }), +// }; + +// // Done +// Ok(res) +// } + +/// Checks if all users, datasets, packages etc exist in the given workflow. +/// +/// # Arguments +/// - `wf`: The [`Workflow`] who's context to verify. +/// - `usecase`: The usecase identifier to resolve. +/// - `usecases`: The map of usescases to resolve the `usecase` to a registry address with. +/// +/// # Returns +/// A [`DataIndex`] that contains the known data in the system. +/// +/// # Errors +/// This function may error if the `usecase` is unknown, or if the remote registry does not reply (correctly). +async fn assert_workflow_context(_wf: &Workflow, usecase: &str, usecases: &HashMap) -> Result<(), Error> { + // Resolve the usecase to an address to query + debug!("Resolving usecase {usecase:?} to registry address..."); + let _api: &Address = match usecases.get(usecase) { + Some(usecase) => &usecase.api, + None => return Err(Error::UnknownUsecase { usecase: usecase.into() }), + }; + + + // // TODO: Finish this + // // Cannot really do it for now, since, unfortunately, we do not know all users (i.e., no idea + // // about scientists). Some kind of user database is, clearly, essential. 
+ + // // // Send the request to the Brane API registry to get the current state of the datasets + // // let users: String = format!("http://{api}/infra/registries"); + // // debug!("Retrieving list of users from registry at {users:?}..."); + // // let users: HashSet = send_request::>(&users).await?.into_keys().collect(); + + // // // Check if the users are all found in the system + // // debug!("Asserting all users in workflow {:?} exist...", wf.id); + // // if let Some(user) = &wf.user { + // // if !users.contains(&user.id) { + // // return Err(Error::UnknownWorkflowUser { workflow: wf.id.clone(), user: user.id.clone() }); + // // } + // // } + // // wf.visit(AssertUserExistance::new(&wf.id, &users))?; + + + // // Check if all the packages mentioned exist in the system + // let graphql: String = format!("http://{api}/graphql"); + // debug!("Retrieving list of packages from registry at {graphql:?}..."); + // let packages: PackageIndex = match brane_tsk::api::get_package_index(&graphql).await { + // Ok(index) => index, + // Err(err) => return Err(Error::PackageIndex { addr: graphql, err }), + // }; + + // debug!("Asserting all packages in workflow {:?} exist...", wf.id); + // wf.visit(AssertPackageExistance::new(&wf.id, &packages))?; + + + // // Check if all the datasets mentioned exist in the system + // let datasets: String = format!("http://{api}/data/info"); + // debug!("Retrieving list of datasets from registry at {datasets:?}..."); + // let datasets: HashSet = send_request::>(&datasets).await?.into_keys().collect(); + + // debug!("Asserting all input datasets in workflow {:?} exist...", wf.id); + // wf.visit(AssertDataExistance::new(&wf.id, datasets))?; + + + // Done! + Ok(()) +} + +/// Interacts with the database to get the currently active policy. +/// +/// # Arguments +/// - `base_policy_hash`: A hash of the base policy that we use to ensure that the active policy is still applicable. +/// - `db`: The [`SQLiteDatabase`] connector that we use to talk to the database. +/// - `res`: Appends the active policy to this list. If there is somehow a disabled policy, the +/// policy is completely overwritten. +/// +/// # Errors +/// This function errors if we failed to interact with the database, or if no policy was currently active. +async fn get_active_policy(base_policy_hash: &str, db: &SQLiteDatabase, res: &mut String) -> Result<(), Error> { + // Time to fetch a connection + debug!("Connecting to backend database..."); + let mut conn: SQLiteConnection = match db.connect(&*DATABASE_USER).await { + Ok(conn) => conn, + Err(err) => return Err(Error::DatabaseConnect { err }), + }; + + // Get the active policy + debug!("Retrieving active policy..."); + let version: u64 = match conn.get_active_version().await { + Ok(Some(pol)) => pol, + Ok(None) => { + warn!("No active policy set in database; assuming builtin VIOLATION policy"); + *res = DENY_ALL_POLICY.into(); + return Ok(()); + }, + Err(err) => return Err(Error::DatabaseGetActiveVersion { err }), + }; + + debug!("Fetching active policy {version} metadata..."); + let md: Metadata = match conn.get_version_metadata(version).await { + Ok(Some(md)) => md, + Ok(None) => return Err(Error::DatabaseInconsistentActive { version }), + Err(err) => return Err(Error::DatabaseGetActiveVersionMetadata { version, err }), + }; + if md.attached.language.len() < 15 + || &md.attached.language.as_bytes()[..15] != b"eflint-haskell-" + || &md.attached.language.as_bytes()[15..] 
!= base_policy_hash.as_bytes() + { + return Err(Error::DatabaseActiveVersionMismatch { + version, + got: md.attached.language, + expected: format!("eflint-haskell-{base_policy_hash}"), + }); + } + + debug!("Fetching active policy {version}..."); + match conn.get_version_content(version).await { + Ok(Some(version)) => { + res.push_str(&version); + Ok(()) + }, + Ok(None) => Err(Error::DatabaseInconsistentActive { version }), + Err(err) => Err(Error::DatabaseGetActiveVersionContent { version, err }), + } +} + + + + + +/***** VISITORS *****/ +// /// Checks whether all users mentioned in a workflow exist. +// #[derive(Debug)] +// struct AssertUserExistance<'w> { +// /// The workflow ID (for debugging) +// wf_id: &'w str, +// /// The users that exist. +// users: &'w HashSet, +// } +// impl<'w> AssertUserExistance<'w> { +// /// Constructor for the AssertUserExistance. +// /// +// /// # Arguments +// /// - `wf_id`: The ID of the workflow we're asserting. +// /// - `users`: The users that exist. Any users occuring in the workflow but not in this list +// /// will be reported. +// /// +// /// # Returns +// /// A new instance of Self, ready to kick ass and assert user existances (and there's no users +// /// to check). +// #[inline] +// fn new(wf_id: &'w str, users: &'w HashSet) -> Self { Self { wf_id, users } } +// } +// impl<'w> Visitor<'w> for AssertUserExistance<'w> { +// type Error = Error; + +// #[inline] +// fn visit_call(&mut self, elem: &'w policy_reasoner::workflow::ElemCall) -> Result, Self::Error> { +// // Check if all users contributing input are known +// for i in &elem.input { +// if let Some(from) = &i.from { +// if !self.users.contains(&from.id) { +// return Err(Error::UnknownInputUser { +// workflow: self.wf_id.into(), +// call: elem.id.clone(), +// input: i.id.clone(), +// user: from.id.clone(), +// }); +// } +// } +// } +// // Assert that only the planned user generates output +// for o in &elem.output { +// if elem.at != o.from { +// return Err(Error::UnplannedOutputUser { +// workflow: self.wf_id.into(), +// call: elem.id.clone(), +// output: o.id.clone(), +// planned_user: elem.at.as_ref().map(|e| e.id.clone()), +// output_user: o.from.as_ref().map(|e| e.id.clone()), +// }); +// } +// } + +// // Check if the planned user is known +// if let Some(user) = &elem.at { +// if !self.users.contains(&user.id) { +// return Err(Error::UnknownPlannedUser { workflow: self.wf_id.into(), call: elem.id.clone(), user: user.id.clone() }); +// } +// } + +// // Finally, check if all metadata users are known +// for m in &elem.metadata { +// if let Some((owner, _)) = &m.signature { +// if !self.users.contains(&owner.id) { +// return Err(Error::UnknownOwnerUser { +// workflow: self.wf_id.into(), +// call: elem.id.clone(), +// tag: m.tag.clone(), +// user: owner.id.clone(), +// }); +// } +// } +// } + +// // OK, continue +// Ok(Some(&elem.next)) +// } +// } + +// /// Checks whether all packages mentioned in a workflow exist. +// #[derive(Debug)] +// struct AssertPackageExistance<'w> { +// /// The workflow ID (for debugging) +// wf_id: &'w str, +// /// The users that exist. +// index: &'w PackageIndex, +// } +// impl<'w> AssertPackageExistance<'w> { +// /// Constructor for the AssertPackageExistance. +// /// +// /// # Arguments +// /// - `wf_id`: The ID of the workflow we're asserting. +// /// - `index`: The [`PackageIndex`] listing which packages exist. Any packages occuring in the +// /// workflow but not in this list will be reported. 
+// /// +// /// # Returns +// /// A new instance of Self, ready to check the existance of those rowdy packages. +// #[inline] +// fn new(wf_id: &'w str, index: &'w PackageIndex) -> Self { Self { wf_id, index } } +// } +// impl<'w> Visitor<'w> for AssertPackageExistance<'w> { +// type Error = Error; + +// #[inline] +// fn visit_call(&mut self, elem: &'w ElemCall) -> Result, Self::Error> { +// // Check if the package mentioned matches the Brane structure +// let (package, version, function): (&str, &str, &str) = if let Some(l) = elem.task.find('[') { +// if let Some(r) = elem.task[l + 1..].find(']') { +// if let Some(dot) = elem.task[l + 1 + r + 1..].find("::") { +// (&elem.task[..l], &elem.task[l + 1..l + 1 + r], &elem.task[l + 1 + r + 1 + dot + 2..]) +// } else { +// return Err(Error::UnknownTaskFormat { workflow: self.wf_id.into(), call: elem.id.clone(), task: elem.task.clone() }); +// } +// } else { +// return Err(Error::UnknownTaskFormat { workflow: self.wf_id.into(), call: elem.id.clone(), task: elem.task.clone() }); +// } +// } else { +// return Err(Error::UnknownTaskFormat { workflow: self.wf_id.into(), call: elem.id.clone(), task: elem.task.clone() }); +// }; + +// // See if we can parse the version +// let version: Version = match Version::from_str(version) { +// Ok(ver) => ver, +// Err(err) => { +// return Err(Error::IllegalVersionFormat { +// workflow: self.wf_id.into(), +// call: elem.id.clone(), +// task: elem.task.clone(), +// version: version.into(), +// err, +// }); +// }, +// }; + +// // OK, now check the package index +// if let Some(info) = self.index.get(package, Some(&version)) { +// if info.functions.get(function).is_none() { +// return Err(Error::UnknownFunction { +// workflow: self.wf_id.into(), +// call: elem.id.clone(), +// package: package.into(), +// version, +// function: function.into(), +// }); +// } +// } else { +// return Err(Error::UnknownPackage { workflow: self.wf_id.into(), call: elem.id.clone(), package: package.into(), version }); +// } + +// // OK, continue +// Ok(Some(&elem.next)) +// } +// } + +// /// Checks whether all datasets mentioned in a workflow exist. +// #[derive(Debug)] +// struct AssertDataExistance<'w> { +// /// The workflow ID (for debugging) +// wf_id: &'w str, +// /// The datasets that exist. +// datasets: HashSet, +// } +// impl<'w> AssertDataExistance<'w> { +// /// Constructor for the AssertDataExistance. +// /// +// /// # Arguments +// /// - `wf_id`: The ID of the workflow we're asserting. +// /// - `datasets`: The list of datasets that we already know exist. Taken by ownership to also +// /// register temporary outputs as we find them. +// /// +// /// # Returns +// /// A new instance of Self, ready to assert the heck out of datasets. +// #[inline] +// fn new(wf_id: &'w str, datasets: HashSet) -> Self { Self { wf_id, datasets } } +// } +// impl<'w> Visitor<'w> for AssertDataExistance<'w> { +// type Error = Error; + +// #[inline] +// fn visit_call(&mut self, elem: &'w ElemCall) -> Result, Self::Error> { +// // First, check if the inputs exist +// for i in &elem.input { +// if !self.datasets.contains(&i.id) { +// return Err(Error::UnknownInput { workflow: self.wf_id.into(), call: elem.id.clone(), input: i.id.clone() }); +// } +// } +// // Then register any produced outputs +// for o in &elem.output { +// self.datasets.insert(o.id.clone()); +// } + +// // OK, continue +// Ok(Some(&elem.next)) +// } +// } + +/// Asserts that the given task occurs exactly once in the workflow. 
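+///
+/// # Example
+/// A sketch of how [`BraneStateResolver::resolve()`] below uses this visitor (the call ID is
+/// illustrative):
+/// ```ignore
+/// let mut finder = CallFinder::new(&wf.id, "some-call-id");
+/// wf.visit(&mut finder)?;
+/// assert!(finder.found);
+/// ```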
+#[derive(Debug)]
+struct CallFinder<'w> {
+    /// The workflow ID (for debugging)
+    wf_id: &'w str,
+    /// The call to find.
+    call: &'w str,
+    /// Whether we already found it or not.
+    found: bool,
+}
+impl<'w> CallFinder<'w> {
+    /// Constructor for the CallFinder.
+    ///
+    /// # Arguments
+    /// - `wf_id`: The ID of the workflow we're asserting.
+    /// - `call`: The ID of the call to find.
+    ///
+    /// # Returns
+    /// A new instance of Self, ready to sniff out the call!
+    #[inline]
+    fn new(wf_id: &'w str, call: &'w str) -> Self { Self { wf_id, call, found: false } }
+}
+impl<'w> Visitor<'w> for CallFinder<'w> {
+    type Error = Error;
+
+    #[inline]
+    fn visit_call(&mut self, elem: &'w ElemCall) -> Result<Option<&'w Elem>, Self::Error> {
+        // Check if it's the one
+        if self.call == elem.id {
+            if !self.found {
+                self.found = true;
+            } else {
+                return Err(Error::DuplicateCallId { workflow: self.wf_id.into(), call: elem.id.clone() });
+            }
+        }
+
+        // OK, continue
+        Ok(Some(&elem.next))
+    }
+}
+
+/// Asserts that the given call occurs exactly once in the workflow and that it has exactly one
+/// input with the given name.
+#[derive(Debug)]
+struct CallInputFinder<'w> {
+    /// The workflow ID (for debugging)
+    wf_id: &'w str,
+    /// The call to find.
+    call: &'w str,
+    /// The input to find.
+    input: &'w str,
+    /// Whether we already found the call or not.
+    found_call: bool,
+}
+impl<'w> CallInputFinder<'w> {
+    /// Constructor for the CallInputFinder.
+    ///
+    /// # Arguments
+    /// - `wf_id`: The ID of the workflow we're asserting.
+    /// - `call`: The ID of the call to find.
+    /// - `input`: The ID of the input to the given call to find.
+    ///
+    /// # Returns
+    /// A new instance of Self, ready to sniff out the input to the call.
+    #[inline]
+    fn new(wf_id: &'w str, call: &'w str, input: &'w str) -> Self { Self { wf_id, call, input, found_call: false } }
+}
+impl<'w> Visitor<'w> for CallInputFinder<'w> {
+    type Error = Error;
+
+    #[inline]
+    fn visit_call(&mut self, elem: &'w ElemCall) -> Result<Option<&'w Elem>, Self::Error> {
+        // Check if it's the one
+        if self.call == elem.id {
+            // It is, so mark it (or complain we've seen it before)
+            if !self.found_call {
+                self.found_call = true;
+            } else {
+                return Err(Error::DuplicateCallId { workflow: self.wf_id.into(), call: elem.id.clone() });
+            }
+
+            // Also verify the input exists in this call
+            let mut found_input: bool = false;
+            for i in &elem.input {
+                if self.input == i.id {
+                    if !found_input {
+                        found_input = true;
+                    } else {
+                        return Err(Error::DuplicateInputId { workflow: self.wf_id.into(), call: elem.id.clone(), input: i.id.clone() });
+                    }
+                }
+            }
+            if !found_input {
+                return Err(Error::UnknownInputToCall { workflow: self.wf_id.into(), call: elem.id.clone(), input: self.input.into() });
+            }
+        }
+
+        // OK, continue
+        Ok(Some(&elem.next))
+    }
+}
+
+
+
+
+
+/***** AUXILIARY *****/
+/// Defines the input to the [`StateResolver`] that will be resolved to concrete info for the reasoner.
+#[derive(Clone)]
+pub struct Input {
+    // Policy-related
+    /// The database connector we use to connect to the pool.
+    pub store: Arc>,
+
+    // Workflow-related
+    /// The usecase that determines the central registry to use.
+    pub usecase: String,
+    /// The workflow to further resolve.
+    pub workflow: specifications::wir::Workflow,
+    /// Question-specific input.
+    pub input: QuestionInput,
+}
+
+/// Defines question-specific input to the [`StateResolver`] that will be resolved to concrete info for the reasoner.
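+///
+/// A rough sketch of how these variants are constructed (the IDs are made up for illustration;
+/// the variants themselves are as defined below):
+/// ```rust,ignore
+/// // Ask whether the workflow as a whole is permitted:
+/// let q = QuestionInput::ValidateWorkflow;
+/// // Ask whether a specific call may be executed:
+/// let q = QuestionInput::ExecuteTask { task: "wf-1-0-task".into() };
+/// // Ask whether a specific input may be transferred to a specific call:
+/// let q = QuestionInput::TransferInput { task: "wf-1-0-task".into(), input: "data_a".into() };
+/// ```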
+#[derive(Clone, Debug)] +pub enum QuestionInput { + ValidateWorkflow, + ExecuteTask { task: String }, + TransferInput { task: String, input: String }, + TransferResult { result: String }, +} + + + + + +/***** LIBRARY *****/ +/// Resolves state for the reasoner in the Brane registry. +#[derive(Clone, Debug)] +pub struct BraneStateResolver { + /// The use-cases that we use to map use-case ID to Brane central registry. + pub usecases: HashMap, + /// The hash of the base policy to ensure validity of active policy with. + pub base_policy_hash: String, +} +impl BraneStateResolver { + /// Constructor for the BraneStateResolver. + /// + /// # Arguments + /// - `usecases`: A map of usecase identifiers to information about where we find the + /// appropriate central registry for that usecase. + /// - `base_policy_hash`: The hash of the base policy to ensure validity of active policy with. + /// + /// # Returns + /// A new StateResolver, ready to resolve state. + #[inline] + pub fn new(usecases: impl IntoIterator, hash: &[u8; 32]) -> Self { + Self { usecases: usecases.into_iter().collect(), base_policy_hash: base16ct::lower::encode_string(hash) } + } +} +impl StateResolver for BraneStateResolver { + type Error = Error; + type Resolved = (String, Question); + type State = Input; + + fn resolve<'a, L>( + &'a self, + state: Self::State, + logger: &'a SessionedAuditLogger, + ) -> impl 'a + Send + Future> + where + L: Sync + AuditLogger, + { + async move { + let _span = span!( + Level::INFO, + "BraneStateResolver::resolve", + reference = logger.reference(), + usecase = state.usecase, + workflow = state.workflow.id + ); + + + // First, resolve the policy by calling the store + let mut policy: String = String::new(); + get_active_policy(&self.base_policy_hash, &state.store, &mut policy).await?; + + + // Then resolve the workflow and create the appropriate question + debug!("Compiling input workflow..."); + let id: String = state.workflow.id.clone(); + let wf: Workflow = match compile(state.workflow) { + Ok(wf) => wf, + Err(err) => return Err(Error::ResolveWorkflow { id, err }), + }; + + // Verify whether all things in the workflow exist + assert_workflow_context(&wf, &state.usecase, &self.usecases).await?; + + // Now check some question-specific input... + match state.input { + QuestionInput::ValidateWorkflow => Ok((policy, Question::ValidateWorkflow { workflow: wf })), + QuestionInput::ExecuteTask { task } => { + let mut finder = CallFinder::new(&wf.id, &task); + wf.visit(&mut finder)?; + if !finder.found { + return Err(Error::UnknownCall { workflow: wf.id.clone(), call: task }); + } + Ok((policy, Question::ExecuteTask { workflow: wf, task })) + }, + QuestionInput::TransferInput { task, input } => { + let mut finder = CallInputFinder::new(&wf.id, &task, &input); + wf.visit(&mut finder)?; + if !finder.found_call { + return Err(Error::UnknownCall { workflow: wf.id.clone(), call: task }); + } + Ok((policy, Question::TransferInput { workflow: wf, task, input })) + }, + QuestionInput::TransferResult { result } => Ok((policy, Question::TransferResult { workflow: wf, result })), + } + } + } +} diff --git a/brane-chk/src/workflow/compile.rs b/brane-chk/src/workflow/compile.rs new file mode 100644 index 00000000..d5536a3d --- /dev/null +++ b/brane-chk/src/workflow/compile.rs @@ -0,0 +1,507 @@ +// COMPILE.rs +// by Lut99 +// +// Created: +// 27 Oct 2023, 17:39:59 +// Last edited: +// 29 Apr 2025, 13:40:12 +// Auto updated? +// Yes +// +// Description: +//! Defines conversion functions between the +//! 
[Checker Workflow](Workflow) and the [WIR](ast::Workflow).
+//
+
+use std::collections::{HashMap, HashSet};
+use std::panic::catch_unwind;
+
+use enum_debug::EnumDebug as _;
+use policy_reasoner::workflow::{Dataset, Elem, ElemBranch, ElemCall, ElemLoop, ElemParallel, Entity, Metadata, Workflow};
+use specifications::data::{AvailabilityKind, DataName, PreprocessKind};
+use specifications::pc::{ProgramCounter, ResolvedProgramCounter};
+use specifications::wir as ast;
+use specifications::wir::builtins::BuiltinFunctions;
+use thiserror::Error;
+use tracing::{Level, debug, trace};
+
+use super::{preprocess, utils};
+
+
+/***** CONSTANTS *****/
+/// The name of the special commit call.
+pub const COMMIT_CALL_NAME: &'static str = "__brane_internals::commit";
+
+/// The name of the special identity function call.
+pub const TOPLEVEL_RETURN_CALL_NAME: &'static str = "__brane_internals::toplevel_return";
+
+
+
+
+
+/***** ERRORS *****/
+/// Defines errors that may occur when compiling an [`ast::Workflow`] to a [`Workflow`].
+#[derive(Debug, Error)]
+pub enum Error {
+    /// No user was given in the input workflow.
+    #[error("User not specified in given workflow")]
+    MissingUser,
+    /// Failed to preprocess the given workflow.
+    #[error("Failed to preprocess input WIR workflow")]
+    Preprocess {
+        #[source]
+        err: super::preprocess::Error,
+    },
+    /// The program counter was out-of-bounds.
+    #[error("Program counter {} is out-of-bounds (function {} has {} edges)",
+        pc,
+        if let Some(func_name) = pc.func_name() { func_name.clone() } else { pc.func_id().to_string() },
+        max)]
+    PcOutOfBounds { pc: ResolvedProgramCounter, max: usize },
+    /// A parallel edge was found whose `merge` was not found.
+    #[error("Parallel edge at {pc}'s merge pointer {merge} is out-of-bounds")]
+    ParallelMergeOutOfBounds { pc: ResolvedProgramCounter, merge: ResolvedProgramCounter },
+    /// A parallel edge was found whose `merge` is not an [`ast::Edge::Join`].
+    #[error("Parallel edge at {pc}'s merge edge (at {merge}) was not an Edge::Join, but an Edge::{got}")]
+    ParallelWithNonJoin { pc: ResolvedProgramCounter, merge: ResolvedProgramCounter, got: String },
+    /// Found a join that wasn't paired with a parallel edge.
+    #[error("Found Join-edge without preceding Parallel-edge at {pc}")]
+    StrayJoin { pc: ResolvedProgramCounter },
+    /// A call was performed to a non-builtin.
+    #[error("Encountered illegal call to function '{name}' at {pc} (calls to non-task, non-builtin functions are not supported)")]
+    IllegalCall { pc: ResolvedProgramCounter, name: String },
+    /// A `commit_result()` was found that returns more than one result.
+    #[error("Call to `commit_result()` at {pc} returns more than one output (got {got})")]
+    CommitTooMuchOutput { pc: ResolvedProgramCounter, got: usize },
+    /// A `commit_result()` was found without output.
+    #[error("Call to `commit_result()` at {pc} does not return a dataset")]
+    CommitNoOutput { pc: ResolvedProgramCounter },
+    /// A `commit_result()` was found that outputs a result instead of a dataset.
+    #[error("Call to `commit_result()` at {pc} returns an IntermediateResult instead of a Data")]
+    CommitReturnsResult { pc: ResolvedProgramCounter },
+}
+
+
+
+
+
+/***** HELPER FUNCTIONS *****/
+/// Analyses the given [`WIR`](ast::Workflow) graph to find the Last Known Locations (LKLs) of the datasets and results mentioned.
+///
+/// # Arguments
+/// - `lkls`: The map of datasets/results to Last Known Locations to populate. Maps each data name to the set of locations where it was last seen.
+/// - `wir`: The entire workflow graph. +/// - `pc`: The [`ProgramCounter`] pointing to the current edge we're analysing. +/// - `breakpoint`: Some possible edge that, if encounters, halts the analysis and returns immediately. +fn analyse_data_lkls(lkls: &mut HashMap>, wir: &ast::Workflow, pc: ProgramCounter, breakpoint: Option) { + // Stop if we hit the breakpoint + if let Some(breakpoint) = breakpoint { + if pc == breakpoint { + return; + } + } + + // Get the edge we're talking about + let edge: &ast::Edge = match utils::get_edge(wir, pc) { + Some(edge) => edge, + None => return, + }; + + // Match the edge + trace!("Analysing data LKLs in {:?}", edge.variant()); + match edge { + ast::Edge::Linear { instrs: _, next } => { + // Note: we don't analyse data reference instantiations since it contains jack shit about the dataset referenced :/ + // Continue with the next graph + analyse_data_lkls(lkls, wir, pc.jump(*next), breakpoint) + }, + + ast::Edge::Node { task: _, locs: _, at, input, result, metadata: _, next } => { + // Mark the locations we're getting the results from + for (i, access) in input { + match access { + Some(AvailabilityKind::Available { .. }) => { + // It's available at the location of the node + if let Some(at) = at { + *lkls.entry(i.clone()).or_default() = HashSet::from([at.clone()]); + } + }, + Some(AvailabilityKind::Unavailable { how: PreprocessKind::TransferRegistryTar { location, dataname: _ } }) => { + // It's available at the planned location + *lkls.entry(i.clone()).or_default() = HashSet::from([location.clone()]); + }, + None => continue, + } + } + + // Mark where the output is, if any + if let (Some(result), Some(at)) = (result, at) { + *lkls.entry(DataName::IntermediateResult(result.clone())).or_default() = HashSet::from([at.clone()]); + } + + // Continue the analysis + analyse_data_lkls(lkls, wir, pc.jump(*next), breakpoint) + }, + + ast::Edge::Stop {} => (), + + ast::Edge::Branch { true_next, false_next, merge } => { + // Do the branches first... 
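+            // (the merge edge, if any, doubles as the breakpoint, so each branch is analysed in isolation)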
+ analyse_data_lkls(lkls, wir, pc.jump(*true_next), merge.map(|m| pc.jump(m))); + if let Some(false_next) = false_next { + analyse_data_lkls(lkls, wir, pc.jump(*false_next), merge.map(|m| pc.jump(m))); + } + + // ...before we continue with the rest + if let Some(merge) = merge { + analyse_data_lkls(lkls, wir, pc.jump(*merge), breakpoint) + } + }, + + ast::Edge::Parallel { branches, merge } => { + // Do all the branches + for branch in branches { + analyse_data_lkls(lkls, wir, pc.jump(*branch), Some(pc.jump(*merge))); + } + + // Run the merge onwards + analyse_data_lkls(lkls, wir, pc.jump(*merge), breakpoint) + }, + + ast::Edge::Join { merge: _, next } => analyse_data_lkls(lkls, wir, pc.jump(*next), breakpoint), + + ast::Edge::Loop { cond, body, next } => { + // Build the body first + analyse_data_lkls(lkls, wir, pc.jump(*body), Some(pc.jump(*cond))); + // The condition + analyse_data_lkls(lkls, wir, pc.jump(*cond), Some(pc.jump(*body - 1))); + // And the next + if let Some(next) = next { + analyse_data_lkls(lkls, wir, pc.jump(*next), breakpoint); + } + }, + + ast::Edge::Call { input: _, result: _, next } => { + // Even for commits, we can't really do anything here (that's the whole point of this analysis, actually, to be able to), and as such continue + analyse_data_lkls(lkls, wir, pc.jump(*next), breakpoint) + }, + + ast::Edge::Return { result } => { + for res in result { + // Assume the end location + lkls.entry(res.clone()).or_default().insert("Danny Data Scientist".into()); + } + }, + } +} + +/// Reconstructs the workflow graph to [`Elem`]s instead of [`ast::Edge`]s. +/// +/// # Arguments +/// - `wir`: The [`ast::Workflow`] to analyse. +/// - `wf_id`: The identifier of the workflow we're compiling in. +/// - `calls`: The map of Call program-counter-indices to function IDs called. +/// - `lkls`: The map of program counter/dataset pairs that map to the locations where we last saw them. Mutable to update it as we make decisions for commits. +/// - `pc`: The program-counter-index of the edge to analyse. These are pairs of `(function, edge_idx)`, where main is referred to by [`usize::MAX`](usize). +/// - `plug`: The element to write when we reached the (implicit) end of a branch. +/// - `breakpoint`: An optional program-counter-index that, if given, will not analyse that edge onwards (excluding it too). +/// +/// # Returns +/// An [`Elem`] representing the given branch of the workflow. +/// +/// # Errors +/// This function errors if a definition in the Workflow was unknown. +fn reconstruct_graph( + wir: &ast::Workflow, + wf_id: &str, + calls: &HashMap, + lkls: &mut HashMap>, + pc: ProgramCounter, + plug: Elem, + breakpoint: Option, +) -> Result { + // Stop if we hit the breakpoint + if let Some(breakpoint) = breakpoint { + if pc == breakpoint { + return Ok(plug); + } + } + + // Get the edge we're talking about + let edge: &ast::Edge = match utils::get_edge(wir, pc) { + Some(edge) => edge, + None => return Ok(plug), + }; + + // Match the edge + trace!("Compiling {:?}", edge.variant()); + match edge { + ast::Edge::Linear { next, .. 
} => { + // Simply skip to the next, as linear connectors are no longer interesting + reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*next), plug, breakpoint) + }, + + ast::Edge::Node { task, locs: _, at, input, result, metadata, next } => { + // Resolve the task definition + let def: &ast::ComputeTaskDef = match catch_unwind(|| wir.table.task(*task)) { + Ok(def) => { + if let ast::TaskDef::Compute(c) = def { + c + } else { + unimplemented!(); + } + }, + Err(_) => panic!("Encountered unknown task '{task}' after preprocessing"), + }; + + // Return the elem + Ok(Elem::Call(ElemCall { + id: pc_to_id(&wir, pc), + task: format!("{}[{}]::{}", def.package, def.version, def.function.name), + input: input + .iter() + .map(|(name, avail)| Dataset { + id: name.name().into(), + from: avail.as_ref().and_then(|avail| match avail { + AvailabilityKind::Available { how: _ } => None, + AvailabilityKind::Unavailable { how: PreprocessKind::TransferRegistryTar { location, dataname: _ } } => { + Some(Entity { id: location.clone() }) + }, + }), + }) + .collect(), + output: result.as_ref().map(|name| Dataset { id: name.clone(), from: at.clone().map(|id| Entity { id }) }).into_iter().collect(), + at: at.clone().map(|id| Entity { id }), + metadata: metadata + .iter() + .map(|md| Metadata { + tag: format!("{}:{}", md.owner, md.tag), + signature: md.signature.clone().map(|(entity, sig)| (Entity { id: entity }, sig)), + }) + .collect(), + next: Box::new(reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*next), plug, breakpoint)?), + })) + }, + + ast::Edge::Stop {} => Ok(Elem::Stop), + + ast::Edge::Branch { true_next, false_next, merge } => { + // Construct the branches first + let mut branches: Vec = + vec![reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*true_next), Elem::Next, merge.map(|merge| pc.jump(merge)))?]; + if let Some(false_next) = false_next { + branches.push(reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*false_next), Elem::Next, merge.map(|merge| pc.jump(merge)))?) + } + + // Build the next, if there is any + let next: Elem = + merge.map(|merge| reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(merge), plug, breakpoint)).transpose()?.unwrap_or(Elem::Stop); + + // Build the elem using those branches and next + Ok(Elem::Branch(ElemBranch { branches, next: Box::new(next) })) + }, + + ast::Edge::Parallel { branches, merge } => { + // Construct the branches first + let mut elem_branches: Vec = Vec::with_capacity(branches.len()); + for branch in branches { + elem_branches.push(reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*branch), Elem::Next, Some(pc.jump(*merge)))?); + } + + // Let us checkout that the merge point is a join + let merge_edge: &ast::Edge = match utils::get_edge(wir, pc.jump(*merge)) { + Some(edge) => edge, + None => return Err(Error::ParallelMergeOutOfBounds { pc: pc.resolved(&wir.table), merge: pc.jump(*merge).resolved(&wir.table) }), + }; + let next: usize = if let ast::Edge::Join { merge: _, next } = merge_edge { + *next + } else { + return Err(Error::ParallelWithNonJoin { + pc: pc.resolved(&wir.table), + merge: pc.jump(*merge).resolved(&wir.table), + got: merge_edge.variant().to_string(), + }); + }; + + // Build the post-join point onwards + let next: Elem = reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(next), plug, breakpoint)?; + + // We have enough to build ourselves + Ok(Elem::Parallel(ElemParallel { branches: elem_branches, next: Box::new(next) })) + }, + + ast::Edge::Join { .. 
} => Err(Error::StrayJoin { pc: pc.resolved(&wir.table) }), + + ast::Edge::Loop { cond, body, next } => { + // Build the body first + let body_elems: Elem = reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*body), Elem::Next, Some(pc.jump(*cond)))?; + + // Build the condition, with immediately following the body for any open ends that we find + let cond: Elem = reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*cond), body_elems, Some(pc.jump(*body - 1)))?; + + // Build the next + let next: Elem = + next.map(|next| reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(next), plug, breakpoint)).transpose()?.unwrap_or(Elem::Stop); + + // We have enough to build self + Ok(Elem::Loop(ElemLoop { body: Box::new(cond), next: Box::new(next) })) + }, + + ast::Edge::Call { input, result, next } => { + // Attempt to get the call ID & matching definition + let func_def: &ast::FunctionDef = match calls.get(&pc) { + Some(id) => match wir.table.funcs.get(*id) { + Some(def) => def, + None => panic!("Encountered unknown function '{id}' after preprocessing"), + }, + None => panic!("Encountered unresolved call after preprocessing"), + }; + + // Only allow calls to builtins + if func_def.name == BuiltinFunctions::CommitResult.name() { + // Deduce the commit's location (or rather, the output location) based on the inputs + let mut locs: HashSet = HashSet::with_capacity(input.len()); + let mut new_input: Vec = Vec::with_capacity(input.len()); + for i in input { + // See if it has any known locations + let location: Option = lkls.get(i).and_then(|locs| locs.iter().next().cloned()); + + // Add it to the list of possible input locations + if let Some(location) = &location { + locs.insert(location.clone()); + } + + // Then create a new Dataset with that + new_input.push(Dataset { id: i.name().into(), from: location.map(|id| Entity { id }) }); + } + + // Attempt to fetch the name of the dataset + if result.len() > 1 { + return Err(Error::CommitTooMuchOutput { pc: pc.resolved(&wir.table), got: result.len() }); + } + let data_name: String = if let Some(name) = result.iter().next() { + if let DataName::Data(name) = name { + name.clone() + } else { + return Err(Error::CommitReturnsResult { pc: pc.resolved(&wir.table) }); + } + } else { + return Err(Error::CommitNoOutput { pc: pc.resolved(&wir.table) }); + }; + + // Construct next first + let next: Elem = reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*next), plug, breakpoint)?; + + // Then we wrap the rest in a commit + let at: Option = locs.into_iter().next().map(|id| Entity { id }); + Ok(Elem::Call(ElemCall { + id: format!("{}-{}-commit", wf_id, pc.resolved(&wir.table)), + task: COMMIT_CALL_NAME.into(), + input: new_input, + output: vec![Dataset { id: data_name, from: at.clone() }], + at, + metadata: vec![], + next: Box::new(next), + })) + } else if func_def.name == BuiltinFunctions::Print.name() + || func_def.name == BuiltinFunctions::PrintLn.name() + || func_def.name == BuiltinFunctions::Len.name() + { + // Using them is OK, we just ignore them for the improved workflow + reconstruct_graph(wir, wf_id, calls, lkls, pc.jump(*next), plug, breakpoint) + } else { + Err(Error::IllegalCall { pc: pc.resolved(&wir.table), name: func_def.name.clone() }) + } + }, + + ast::Edge::Return { result } => { + // Compile it as: final transfer, then return + Ok(Elem::Call(ElemCall { + id: format!("{}-{}-return", wf_id, pc.resolved(&wir.table)), + task: TOPLEVEL_RETURN_CALL_NAME.into(), + input: result.iter().map(|data| Dataset { id: data.name().into(), from: None }).collect(), + 
output: result + .iter() + .map(|data| Dataset { id: data.name().into(), from: Option::clone(&wir.user).map(|id| Entity { id }) }) + .collect(), + at: Option::clone(&wir.user).map(|id| Entity { id }), + metadata: vec![], + next: Box::new(Elem::Stop), + })) + }, + } +} + + + + + +/***** LIBRARY *****/ +/// Converts any Brane workflow program counter to a string ID used to recognize the same call +/// post-compilation. +/// +/// # Arguments +/// - `wir`: Some [`Workflow`] in which we're pointing. +/// - `pc`: The [`ProgramCounter`] pointing to the call we're getting an ID of. +/// +/// # Returns +/// A [`String`] encoding the target call's automatically generated identifier. +pub fn pc_to_id(wir: &ast::Workflow, pc: ProgramCounter) -> String { format!("{}-{}-task", wir.id, pc.resolved(&wir.table)) } + + + +/// Compiles from a Brane [WIR](brane_ast::Workflow) to a policy reasoner [Workflow]. +/// +/// # Arguments +/// - `wf`: The WIR to compile. +/// +/// # Returns +/// An equivalent [`Workflow`]. +/// +/// # Errors +/// This function can error at any time if the given `wf` is in an invalid shape for compilation. +pub fn compile(value: ast::Workflow) -> Result { + if tracing::level_filters::STATIC_MAX_LEVEL >= Level::DEBUG { + let mut buf: Vec = Vec::new(); + brane_ast::traversals::print::ast::do_traversal(&value, &mut buf).unwrap(); + debug!("Compiling workflow:\n\n{}\n", String::from_utf8(buf).unwrap()); + } + + // First, analyse the calls in the workflow as much as possible (and simplify) + let wf_id: String = value.id.clone(); + let wf_user: Option = Option::clone(&value.user); + let (wir, calls): (ast::Workflow, HashMap) = match preprocess::simplify(value) { + Ok(res) => res, + Err(err) => return Err(Error::Preprocess { err }), + }; + if tracing::level_filters::STATIC_MAX_LEVEL >= Level::DEBUG { + // Write the processed graph + let mut buf: Vec = vec![]; + brane_ast::traversals::print::ast::do_traversal(&wir, &mut buf).unwrap(); + debug!("Preprocessed workflow:\n\n{}\n", String::from_utf8_lossy(&buf)); + } + + // Collect the map of data to Last Known Locations (LKL). + let mut lkls: HashMap> = HashMap::new(); + analyse_data_lkls(&mut lkls, &wir, ProgramCounter::start(), None); + + // Alright now attempt to re-build the graph in the new style + let graph: Elem = reconstruct_graph(&wir, &wf_id, &calls, &mut lkls, ProgramCounter::start(), Elem::Stop, None)?; + + // Build a new Workflow with that! + Ok(Workflow { + id: wf_id, + start: graph, + + user: wf_user.clone().map(|id| Entity { id }), + metadata: wir + .metadata + .iter() + .map(|md| Metadata { + tag: format!("{}:{}", md.owner, md.tag), + signature: md.signature.clone().map(|(entity, sig)| (Entity { id: entity }, sig)), + }) + .collect(), + signature: wf_user.map(|id| (Entity { id }, "its_signed_i_swear_mom".into())), + }) +} diff --git a/brane-chk/src/workflow/compiler.rs b/brane-chk/src/workflow/compiler.rs new file mode 100644 index 00000000..a5551bf2 --- /dev/null +++ b/brane-chk/src/workflow/compiler.rs @@ -0,0 +1,324 @@ +// COMPILER.rs +// by Lut99 +// +// Created: +// 21 Oct 2024, 10:47:42 +// Last edited: +// 02 May 2025, 11:50:06 +// Auto updated? +// Yes +// +// Description: +//! Bonus binary that implements a `WIR` to eFLINT JSON through +//! `Workflow` compiler. 
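+//!
+//!   Reads a workflow from a file or stdin (`-`), converts between the representations
+//!   selected with `-1` (input) and `-2`/`--output-lang` (output), and writes the result
+//!   to a file or stdout (`-`).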
+// + +use std::fmt::{Display, Formatter, Result as FResult}; +use std::fs; +use std::io::{Read, Write}; +use std::str::FromStr; + +use brane_chk::workflow::{WorkflowToEflint, compile}; +use clap::Parser; +use error_trace::trace; +use policy_reasoner::workflow::Workflow; +use specifications::wir::Workflow as Wir; +use thiserror::Error; +use tracing::{Level, debug, error, info}; + + +/***** ERRORS *****/ +/// Defines errors that fail when parsing input languages. +#[derive(Debug, Error)] +#[error("Unknown input language '{}'", self.0)] +struct UnknownInputLanguageError(String); + +/// Defines errors that fail when parsing output languages. +#[derive(Debug, Error)] +#[error("Unknown output language '{}'", self.0)] +struct UnknownOutputLanguageError(String); + + + + + +/***** ARGUMENTS *****/ +/// Defines the possible input languages (and how to parse them). +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +enum InputLanguage { + /// It's Brane WIR. + Wir, + /// It's policy reasoner Workflow. + Workflow, +} +impl Display for InputLanguage { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> FResult { + match self { + Self::Wir => write!(f, "Brane WIR"), + Self::Workflow => write!(f, "Workflow"), + } + } +} +impl FromStr for InputLanguage { + type Err = UnknownInputLanguageError; + + #[inline] + fn from_str(s: &str) -> Result { + match s { + "wir" => Ok(Self::Wir), + "wf" | "workflow" => Ok(Self::Workflow), + raw => Err(UnknownInputLanguageError(raw.into())), + } + } +} + +/// Defines the possible output languages (and how to parse them). +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +enum OutputLanguage { + /// It's policy reasoner Workflow. + Workflow, + /// It's eFLINT Itself. + EFlint, +} +impl Display for OutputLanguage { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> FResult { + match self { + Self::Workflow => write!(f, "Workflow"), + Self::EFlint => write!(f, "eFLINT"), + } + } +} +impl FromStr for OutputLanguage { + type Err = UnknownOutputLanguageError; + + #[inline] + fn from_str(s: &str) -> Result { + match s { + "wf" | "workflow" => Ok(Self::Workflow), + "eflint" | "eflint-dsl" => Ok(Self::EFlint), + raw => Err(UnknownOutputLanguageError(raw.into())), + } + } +} + + + +/// Defines the arguments of the binary. +#[derive(Debug, Parser)] +struct Arguments { + /// Whether to enable debug statements + #[clap(long, help = "If given, enables INFO- and DEBUG-level log statements.")] + debug: bool, + /// Whether to enable trace statements. + #[clap(long, help = "If given, enables TRACE-level log statements.")] + trace: bool, + + /// The input file to compile. + #[clap(name = "INPUT", default_value = "-", help = "The input file to compile. You can use '-' to compile from stdin.")] + input: String, + /// The output file to write to. + #[clap(short, long, default_value = "-", help = "The output file to compile to. You can use '-' to write to stdout.")] + output: String, + + /// The input language to compile from. + #[clap( + short = '1', + default_value = "wir", + help = "The input language to compile from. Options are 'wir' for Brane's WIR, or 'wf'/'workflow' for the policy reasoner's workflow \ + representation." + )] + input_lang: InputLanguage, + /// The output language to compile to. + #[clap( + short = '2', + long, + default_value = "eflint", + help = "The output language to compile to. Options are 'wf'/'workflow' for the policy reasoner's workflow representation, or \ + 'eflint'/'eflint-dsl' for the eFLINT DSL." 
+ )] + output_lang: OutputLanguage, +} + + + + + +/***** FUNCTIONS *****/ +/// Reads the input, then compiles it to a [`Workflow`]. +/// +/// # Arguments +/// - `path`: The path (or '-' for stdin) where the input may be found. +/// - `lang`: The [`InputLanguage`] determining how to get to a workflow. +/// +/// # Returns +/// A [`Workflow`] that we parsed from the input. +/// +/// # Errors +/// This function fails if we failed to read the input (file or stdin), or if the input couldn't +/// be compiled (it was invalid somehow). +/// +/// Note that it errors by calling [`std::process::exit()`]. +#[inline] +fn input_to_workflow(path: &str, lang: InputLanguage) -> Workflow { + // Read the input file + let input: String = if path == "-" { + debug!("Reading input from stdin..."); + let mut input: String = String::new(); + if let Err(err) = std::io::stdin().read_to_string(&mut input) { + error!("{}", trace!(("Failed to read from stdin"), err)); + std::process::exit(1); + } + input + } else { + debug!("Reading input '{path}' from file..."); + match fs::read_to_string(path) { + Ok(input) => input, + Err(err) => { + error!("{}", trace!(("Failed to read input file '{path}'"), err)); + std::process::exit(1); + }, + } + }; + + // See if we need to parse it as a Workflow or as a WIR + match lang { + InputLanguage::Wir => { + // Parse it as WIR, first + debug!("Parsing input as Brane WIR..."); + let wir: Wir = match serde_json::from_str(&input) { + Ok(wir) => wir, + Err(err) => { + error!( + "{}", + trace!(("Failed to parse {} as Brane WIR", if path == "-" { "stdin".into() } else { format!("input file '{path}'") }), err) + ); + std::process::exit(1); + }, + }; + + // Then compile it to a Workflow + let wir_id: String = wir.id.clone(); + debug!("Compiling Brane WIR '{wir_id}' to a workflow..."); + match compile(wir) { + Ok(wf) => wf, + Err(err) => { + error!("{}", trace!(("Failed to compile input Brane WIR '{wir_id}' to a workflow"), err)); + std::process::exit(1); + }, + } + }, + + InputLanguage::Workflow => { + // It sufficies to parse as Workflow directly + debug!("Parsing input as a workflow..."); + match serde_json::from_str(&input) { + Ok(wf) => wf, + Err(err) => { + error!( + "{}", + trace!(("Failed to parse {} as a workflow", if path == "-" { "stdin".into() } else { format!("input file '{path}'") }), err) + ); + std::process::exit(1); + }, + } + }, + } +} + +/// Takes a [`Workflow`] and writes it to the given output, potentially after compilation. +/// +/// # Arguments +/// - `path`: The path (or '-' for stdin) where the output should be written to. +/// - `lang`: The [`OutputLanguage`] determining what to write. +/// - `workflow`: The [`Workflow`] to output. +/// +/// # Errors +/// This function fails if we failed to translate the workflow to the appropriate output language, +/// or if we failed to write to the output (either stdout or file). +/// +/// Note that it errors by calling [`std::process::exit()`]. 
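+///
+/// For example (a sketch; `wf` is some [`Workflow`] obtained from [`input_to_workflow()`],
+/// and the output file name is made up):
+/// ```rust,ignore
+/// // Compile `wf` to the eFLINT DSL and write it to a file:
+/// workflow_to_output("out.eflint", OutputLanguage::EFlint, wf);
+/// ```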
+#[inline] +fn workflow_to_output(path: &str, lang: OutputLanguage, workflow: Workflow) { + // See if we need to serialize the Workflow or compile it first + let output: String = match lang { + OutputLanguage::Workflow => { + // It sufficies to serialize the Workflow directly + debug!("Serializing workflow '{}' to JSON...", workflow.id); + match serde_json::to_string_pretty(&workflow) { + Ok(raw) => raw, + Err(err) => { + error!("{}", trace!(("Failed to serialize given workflow '{}'", workflow.id), err)); + std::process::exit(1); + }, + } + }, + + OutputLanguage::EFlint => { + // Compile it to eFLINT, first + debug!("Compiling workflow '{}' to eFLINT JSON...", workflow.id); + WorkflowToEflint(&workflow).to_string() + }, + }; + + // OK, now write to out or stdout + if path == "-" { + debug!("Writing result to stdout..."); + if let Err(err) = std::io::stdout().write_all(&output.as_bytes()) { + error!("{}", trace!(("Failed to write to stdout"), err)); + std::process::exit(1); + } + } else { + debug!("Writing result to output file '{path}'..."); + if let Err(err) = fs::write(path, output) { + error!("{}", trace!(("Failed to write to output file '{path}'"), err)); + std::process::exit(1); + } + } +} + + + + + +/***** ENTRYPOINT *****/ +fn main() { + // Parse the arguments + let args = Arguments::parse(); + + // Setup the logger + tracing_subscriber::fmt() + .with_max_level(if args.trace { + Level::TRACE + } else if args.debug { + Level::DEBUG + } else { + Level::WARN + }) + .init(); + info!("{} - v{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")); + + // Get the input workflow + let workflow: Workflow = input_to_workflow(&args.input, args.input_lang); + if tracing::level_filters::STATIC_MAX_LEVEL >= Level::DEBUG { + debug!( + "Parsed workflow form input:\n{}\n{}\n{}", + (0..80).map(|_| '-').collect::(), + workflow.visualize(), + (0..80).map(|_| '-').collect::() + ); + } + + // Then write to the output workflow + workflow_to_output(&args.output, args.output_lang, workflow); + + // Done! + println!( + "Successfully compiled {} ({}) to {} ({})", + if args.input == "-" { "stdin".into() } else { format!("input file '{}'", args.input) }, + args.input_lang, + if args.output == "-" { "stdout".into() } else { format!("output file '{}'", args.output) }, + args.output_lang, + ); +} diff --git a/brane-chk/src/workflow/eflint.rs b/brane-chk/src/workflow/eflint.rs new file mode 100644 index 00000000..db53ed14 --- /dev/null +++ b/brane-chk/src/workflow/eflint.rs @@ -0,0 +1,531 @@ +// EFLINT JSON.rs +// by Lut99 +// +// Created: +// 19 Oct 2024, 10:21:59 +// Last edited: +// 02 May 2025, 09:27:19 +// Auto updated? +// Yes +// +// Description: +//! Implements a compiler from a [`Workflow`] to a series of +//! [`efint_json`] [`Phrase`]s. +// + +use std::collections::{HashMap, HashSet}; +use std::convert::Infallible; +use std::fmt::{Display, Formatter, Result as FResult}; + +use policy_reasoner::workflow::visitor::Visitor; +use policy_reasoner::workflow::{Dataset, Elem, ElemBranch, ElemCall, ElemLoop, ElemParallel, Entity, Metadata, Workflow}; +use rand::Rng as _; +use rand::distributions::Alphanumeric; +use tracing::{trace, warn}; + +use super::compile::COMMIT_CALL_NAME; +use crate::workflow::compile::TOPLEVEL_RETURN_CALL_NAME; + + +/***** HELPER MACROS *****/ +/// Shorthand for creating an eFLINT JSON Specification true postulation. +macro_rules! create { + ($inst:expr) => { + format!("+{}.\n", $inst) + }; +} + +/// Shorthand for creating an eFLINT JSON Specification constructor application. +macro_rules! 
constr_app { + ($id:expr $(, $args:expr)* $(,)?) => { + { + let mut res = $id.to_string(); + res.push('('); + let mut first = true; + $(#[allow(unused)] if !first { res.push_str(", ") } else { first = false } + res.push_str(&$args.to_string());)* + res.push(')'); + res + } + }; +} + +/// Shorthand for creating an eFLINT JSON Specification string literal. +macro_rules! str_lit { + ($val:expr) => { + format!("{:?}", $val) + }; +} + + + + + +/***** HELPER FUNCTIONS *****/ +/// Compiles a given piece of metadata. +/// +/// # Arguments +/// - `metadata`: The [`Metadata`] to compile. +/// - `phrases`: The buffer to compile to. +fn compile_metadata(metadata: &Metadata, phrases: &mut Vec) { + // First, we push the tag + // ```eflint + // +tag(#metadata.tag). + // ``` + let tag: String = constr_app!("tag", str_lit!(metadata.tag.clone())); + phrases.push(create!(tag.clone())); + + // Push the signature + let signature: String = if let Some((owner, signature)) = &metadata.signature { + // ```eflint + // +signature(user(#owner), #signature). + // ``` + constr_app!("signature", constr_app!("user", str_lit!(owner.id.clone())), str_lit!(signature.clone())) + } else { + // Push an empty signature, to be sure that the one is in serialized metadata is still findable + // ```eflint + // +signature(user(""), ""). + // ``` + constr_app!("signature", constr_app!("user", str_lit!("")), str_lit!("")) + }; + phrases.push(create!(signature.clone())); + + // Then push the metadata as a whole + phrases.push(create!(constr_app!("metadata", tag, signature))); +} + + + + + +/***** FORMATTERS *****/ +/// Serializes a workflow to eFLINT using it's [`Display`]-implementation. +pub struct WorkflowToEflint<'w>(pub &'w Workflow); +impl<'w> Display for WorkflowToEflint<'w> { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> FResult { eflint_fmt(&self.0, f) } +} + + + + + +/***** VISITORS *****/ +/// Names all loops in a [`Workflow`]. +struct LoopNamer<'w> { + /// The identifier of the workflow. + wf_id: &'w str, + /// Stores the names of the loops. + loops: HashMap<*const ElemLoop, String>, +} +impl<'w> LoopNamer<'w> { + /// Constructor for the LoopNamer. + /// + /// # Arguments + /// - `wf_id`: The identifier of the workflow we're considering. + /// + /// # Returns + /// A new LoopNamer, ready for naming. + #[inline] + pub fn new(wf_id: &'w str) -> Self { Self { wf_id, loops: HashMap::new() } } +} +impl<'w> Visitor<'w> for LoopNamer<'w> { + type Error = Infallible; + + fn visit_loop(&mut self, elem: &'w ElemLoop) -> Result, Self::Error> { + let ElemLoop { body, next } = elem; + + // Generate a name for this loop + self.loops.insert( + elem as *const ElemLoop, + format!("{}-{}-loop", self.wf_id, rand::thread_rng().sample_iter(Alphanumeric).take(4).map(char::from).collect::()), + ); + + // Continue + self.visit(body)?; + Ok(Some(next)) + } +} + +/// Finds the flows of datasets through a sequence of elements as if it was a single element. +struct DataAnalyzer<'w> { + /// The names of loops we've already found. + names: &'w HashMap<*const ElemLoop, String>, + /// The first nodes that we encounter with their (potential) inputs. + /// + /// There can be more than one if a branch or parallel is found. + first: Vec<(String, HashSet)>, + /// The (potential) outputs of this chain of elements. + last: HashSet, +} +impl<'w> DataAnalyzer<'w> { + /// Constructor for the DataAnalyzer. + /// + /// # Arguments + /// - `names`: A list of names for loops. + /// + /// # Returns + /// A new DataAnalyzer struct, ready to analyze. 
+ #[inline] + pub fn new(names: &'w HashMap<*const ElemLoop, String>) -> Self { Self { names, first: Vec::new(), last: HashSet::new() } } +} +impl<'w> Visitor<'w> for DataAnalyzer<'w> { + type Error = Infallible; + + fn visit_call(&mut self, elem: &'w ElemCall) -> Result, Self::Error> { + // Log it's the first if we haven't found any yet + if self.first.is_empty() { + self.first.push((elem.id.clone(), elem.input.iter().cloned().collect())); + } + self.last.clear(); + self.last.extend(elem.output.iter().cloned()); + + // Continue + Ok(Some(&elem.next)) + } + + fn visit_branch(&mut self, elem: &'w ElemBranch) -> Result, Self::Error> { + // Aggregate the inputs & outputs of the branches + let add_firsts: bool = !self.first.is_empty(); + self.last.clear(); + for branch in &elem.branches { + let mut analyzer = Self::new(self.names); + analyzer.visit(branch)?; + if add_firsts { + self.first.extend(analyzer.first); + } + self.last.extend(analyzer.last); + } + + // OK, continue with the branch's next + Ok(Some(&elem.next)) + } + + fn visit_parallel(&mut self, elem: &'w ElemParallel) -> Result, Self::Error> { + // Aggregate the inputs & outputs of the branches + let add_firsts: bool = !self.first.is_empty(); + self.last.clear(); + for branch in &elem.branches { + let mut analyzer = Self::new(self.names); + analyzer.visit(branch)?; + if add_firsts { + self.first.extend(analyzer.first); + } + self.last.extend(analyzer.last); + } + + // OK, continue with the branch's next + Ok(Some(&elem.next)) + } + + fn visit_loop(&mut self, elem: &'w ElemLoop) -> Result, Self::Error> { + // We recurse to find the inputs- and outputs + let mut analyzer = Self::new(self.names); + analyzer.visit(&elem.body)?; + + // Propagate these + if self.first.is_empty() { + // Get the loop's name + let id: &String = self.names.get(&(elem as *const ElemLoop)).unwrap_or_else(|| panic!("Encountered loop without name after loop naming")); + + // Set this loop as the first node, combining all the input dataset from the children + self.first.push((id.clone(), analyzer.first.into_iter().flat_map(|(_, data)| data).collect())); + } + self.last.clear(); + self.last.extend(analyzer.last.into_iter()); + + // Continue with iteration + Ok(Some(&elem.next)) + } +} + +/// Compiles the calls & loops in the given sequence to eFLINT phrases. +struct EFlintCompiler<'w> { + /// The identifier of the workflow. + wf_id: &'w str, + /// The end user of the workflow. + wf_user: &'w Option, + /// The names of loops we've already found. + names: &'w HashMap<*const ElemLoop, String>, + /// The phrases we're compiling to. + phrases: Vec, +} +impl<'w> EFlintCompiler<'w> { + /// Constructor for the EFlintCompiler. + /// + /// # Arguments + /// - `wf_id`: The identifier of the workflow we're considering. + /// - `wf_user`: The end user of the workflow we're considering. + /// - `names`: A list of names for loops. + /// + /// # Returns + /// A new EFlintCompiler struct, ready to compile. + #[inline] + pub fn new(wf_id: &'w str, wf_user: &'w Option, names: &'w HashMap<*const ElemLoop, String>) -> Self { + Self { wf_id, wf_user, names, phrases: Vec::new() } + } +} +impl<'w> Visitor<'w> for EFlintCompiler<'w> { + type Error = Infallible; + + #[inline] + fn visit_call(&mut self, elem: &'w ElemCall) -> Result, Self::Error> { + trace!("Compiling Elem::Call to eFLINT"); + + // Define a new task call and make it part of the workflow + // ```eflint + // +node(workflow(#wf_id), #id). + // +task(node(workflow(#wf_id), #id)). 
+ // ``` + let node: String = constr_app!("node", constr_app!("workflow", str_lit!(self.wf_id)), str_lit!(elem.id.clone())); + self.phrases.push(create!(node.clone())); + if elem.task == COMMIT_CALL_NAME { + self.phrases.push(create!(constr_app!("commit", node.clone()))); + } else if elem.task == TOPLEVEL_RETURN_CALL_NAME { + if let Some(wf_user) = self.wf_user { + // Mark the results as results of the workflow + for r in &elem.input { + // ```eflint + // +workflow-result-recipient(workflow-result(workflow(#wf_id), asset(#r.name)), user(#wf_user.name)). + // ``` + self.phrases.push(create!(constr_app!( + "workflow-result-recipient", + constr_app!("workflow-result", constr_app!("workflow", str_lit!(self.wf_id)), constr_app!("asset", str_lit!(r.id.clone()))), + constr_app!("user", str_lit!(wf_user.id.clone())), + ))); + } + } + + // Continue + return Ok(Some(&elem.next)); + } else { + self.phrases.push(create!(constr_app!("task", node.clone()))); + } + + // Link the code input + // ```eflint + // +node-input(#node, asset("#package[#version]")). + // +function(node-input(#node, asset("#package[#version]")), #name). + // ``` + let package: &str = match elem.task.find("::") { + Some(pos) => &elem.task[..pos], + None => &elem.task, + }; + let function: &str = match elem.task.find("::") { + Some(pos) => &elem.task[pos + 2..], + None => &elem.task, + }; + let code_input: String = constr_app!("node-input", node.clone(), constr_app!("asset", str_lit!(package))); + self.phrases.push(create!(code_input.clone())); + self.phrases.push(create!(constr_app!("function", code_input.clone(), str_lit!(function)))); + + // Add its inputs + for i in &elem.input { + // Link this input to the task + // ```eflint + // +node-input(#node, asset(#i.name)). + // ``` + let node_input: String = constr_app!("node-input", node.clone(), constr_app!("asset", str_lit!(i.id.clone()))); + self.phrases.push(create!(node_input.clone())); + + // Add where this dataset lives if we know that + if let Some(from) = &i.from { + // It's planned to be transferred from this location + // ```eflint + // +node-input-from(#node-input, domain(user(#from))). + // ``` + self.phrases.push(create!(constr_app!( + "node-input-from", + node_input, + constr_app!("domain", constr_app!("user", str_lit!(from.id.clone()))) + ))); + } else if let Some(at) = &elem.at { + // It's present on the task's location + // ```eflint + // +node-input-from(#node-input, domain(user(#at))). + // ``` + self.phrases.push(create!(constr_app!( + "node-input-from", + node_input, + constr_app!("domain", constr_app!("user", str_lit!(at.id.clone()))) + ))); + } else { + warn!("Encountered input dataset '{}' without transfer source in task '{}' as part of workflow '{}'", i.id, elem.id, self.wf_id); + } + } + // Add the output, if any + for o in &elem.output { + // ```eflint + // +node-output(#node, asset(#o.name)). + // ``` + self.phrases.push(create!(constr_app!("node-output", node.clone(), constr_app!("asset", str_lit!(o.id.clone()))))); + } + // Add the location of the task execution + if let Some(at) = &elem.at { + // ```eflint + // +node-at(#node, domain(user(#at))). 
+ // ``` + self.phrases.push(create!(constr_app!("node-at", node.clone(), constr_app!("domain", constr_app!("user", str_lit!(at.id.clone())))))); + } else { + warn!("Encountered unplanned task '{}' part of workflow '{}'", elem.id, self.wf_id); + } + + // Finally, add any task metadata + for m in &elem.metadata { + // Write the metadata's children + compile_metadata(m, &mut self.phrases); + + // Resolve the metadata's signature + let (owner, signature): (&str, &str) = + m.signature.as_ref().map(|(owner, signature)| (owner.id.as_str(), signature.as_str())).unwrap_or(("", "")); + + // Write the phrase + // ```eflint + // +node-metadata(#node, metadata(tag(#m.tag), signature(user(#m.assigner), #m.signature)))). + // ``` + self.phrases.push(create!(constr_app!( + "node-metadata", + node.clone(), + constr_app!( + "metadata", + constr_app!("tag", str_lit!(m.tag.clone())), + constr_app!("signature", constr_app!("user", str_lit!(owner)), str_lit!(signature)), + ) + ))); + } + + // OK, move to the next + Ok(Some(&elem.next)) + } + + #[inline] + fn visit_loop(&mut self, elem: &'w ElemLoop) -> Result, Self::Error> { + // Serialize the body phrases first + self.visit(&elem.body)?; + + // Serialize the node + // ```eflint + // +node(workflow(#wf_id), #id). + // +loop(node(workflow(#wf_id), #id)). + // ``` + let id: &String = self.names.get(&(elem as *const ElemLoop)).unwrap_or_else(|| panic!("Found unnamed loop after loop naming")); + let node: String = constr_app!("node", constr_app!("workflow", str_lit!(self.wf_id)), str_lit!(id.clone())); + self.phrases.push(create!(node.clone())); + self.phrases.push(create!(constr_app!("loop", node.clone()))); + + // Collect the inputs & outputs of the body + let mut analyzer = DataAnalyzer::new(&self.names); + analyzer.visit(&elem.body)?; + + // Post-process the input into a list of body nodes and a list of data input + let (bodies, inputs): (Vec, Vec>) = analyzer.first.into_iter().unzip(); + let inputs: HashSet = inputs.into_iter().flatten().collect(); + + // Add the loop inputs + for input in inputs { + // ```eflint + // +node-input(#node, asset(#i.name)). + // ``` + let node_input: String = constr_app!("node-input", node.clone(), constr_app!("asset", str_lit!(input.id.clone()))); + self.phrases.push(create!(node_input.clone())); + + // Add where this dataset lives if we know that + if let Some(from) = &input.from { + // It's planned to be transferred from this location + // ```eflint + // +node-input-from(#node-input, domain(user(#from))). + // ``` + self.phrases.push(create!(constr_app!( + "node-input-from", + node_input, + constr_app!("domain", constr_app!("user", str_lit!(from.id.clone()))) + ))); + } else { + warn!("Encountered input dataset '{}' without transfer source in commit '{}' as part of workflow '{}'", input.id, id, self.wf_id); + } + } + // Add the loop outputs + for output in analyzer.last { + // ```eflint + // +node-output(#node, asset(#output.name)). + // ``` + self.phrases.push(create!(constr_app!("node-output", node.clone(), constr_app!("asset", str_lit!(output.id.clone()))))); + } + // Add the loop's bodies + for body in bodies { + // ```eflint + // +loop-body(loop(#node), node(workflow(#wf_id), #body)). 
+ // ``` + self.phrases.push(create!(constr_app!( + "loop-body", + constr_app!("loop", node.clone()), + constr_app!("node", constr_app!("workflow", str_lit!(self.wf_id)), str_lit!(body)) + ))); + } + + // Done, continue with the next one + Ok(Some(&elem.next)) + } +} + + + + + +/***** LIBRARY FUNCTIONS *****/ +/// Compiles a [`Workflow`] to a series of [`efint_json`] [`Phrase`]s. +/// +/// # Arguments +/// - `wf`: The [`Workflow`] to compile. +/// - `f`: Some [`Formatter`] to write to. +/// +/// # Errors +/// This function fails if it fails to write to the given `f`ormatter. +pub fn eflint_fmt(wf: &Workflow, f: &mut Formatter) -> FResult { + // First, we shall name all loops + let mut namer = LoopNamer::new(&wf.id); + namer.visit(&wf.start).unwrap(); + + // Start the compiler + let mut compiler = EFlintCompiler::new(&wf.id, &wf.user, &namer.loops); + + // Kick off the first phrase(s) by adding the notion of the workflow as a whole + // ```eflint + // +workflow(#self.id). + // ``` + let workflow = constr_app!("workflow", str_lit!(wf.id.clone())); + compiler.phrases.push(create!(workflow.clone())); + + // Add workflow metadata + for m in &wf.metadata { + // Write the metadata's children + compile_metadata(m, &mut compiler.phrases); + + // Resolve the metadata's signature + let (owner, signature): (&str, &str) = + m.signature.as_ref().map(|(owner, signature)| (owner.id.as_str(), signature.as_str())).unwrap_or(("", "")); + + // Write the phrase + // ```eflint + // +workflow-metadata(#workflow, metadata(tag(#m.tag), signature(user(#m.assigner), #m.signature)))). + // ``` + compiler.phrases.push(create!(constr_app!( + "workflow-metadata", + workflow.clone(), + constr_app!( + "metadata", + constr_app!("tag", str_lit!(m.tag.clone())), + constr_app!("signature", constr_app!("user", str_lit!(owner)), str_lit!(signature)), + ) + ))); + } + + // Compile the 'flow to a list of phrases + compiler.visit(&wf.start).unwrap(); + + // Done! + for phrase in compiler.phrases { + write!(f, "{phrase}")?; + } + Ok(()) +} diff --git a/brane-chk/src/workflow/mod.rs b/brane-chk/src/workflow/mod.rs new file mode 100644 index 00000000..593923de --- /dev/null +++ b/brane-chk/src/workflow/mod.rs @@ -0,0 +1,26 @@ +// MOD.rs +// by Lut99 +// +// Created: +// 17 Oct 2024, 16:39:23 +// Last edited: +// 29 Apr 2025, 23:36:22 +// Auto updated? +// Yes +// +// Description: +//! Contains code for compiling the Brane WIR to the policy reasoner's +//! version of a workflow. +// + +// Declare submodules +pub mod compile; +pub mod eflint; +pub mod preprocess; +#[cfg(test)] +mod tests; +mod utils; + +// Decide what to put in this namespace +pub use compile::compile; +pub use eflint::{WorkflowToEflint, eflint_fmt}; diff --git a/brane-chk/src/workflow/preprocess.rs b/brane-chk/src/workflow/preprocess.rs new file mode 100644 index 00000000..1ee35dc5 --- /dev/null +++ b/brane-chk/src/workflow/preprocess.rs @@ -0,0 +1,1108 @@ +// PREPROCESS.rs +// by Lut99 +// +// Created: +// 02 Nov 2023, 14:52:26 +// Last edited: +// 29 Apr 2025, 13:40:25 +// Auto updated? +// Yes +// +// Description: +//! Defines a preprocessing step on a [WIR](Workflow) that simplifies it +//! to increase the support of the simpler checker workflow. 
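+//!
+//!   Concretely, this means statically resolving which function every `Call`-edge
+//!   refers to (see `resolve_calls()`) and inlining function bodies where possible,
+//!   so that later compilation only has to deal with calls to tasks and builtins.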
+// + +use std::collections::{HashMap, HashSet}; +use std::error; +use std::fmt::{Display, Formatter, Result as FResult}; +use std::panic::catch_unwind; +use std::sync::Arc; + +use enum_debug::EnumDebug as _; +use specifications::pc::{ProgramCounter, ResolvedProgramCounter}; +use specifications::wir::builtins::BuiltinFunctions; +use specifications::wir::func_id::FunctionId; +use specifications::wir::merge_strategy::MergeStrategy; +use specifications::wir::{Edge, EdgeInstr, FunctionDef, SymTable, TaskDef, Workflow}; +use tracing::{Level, debug, trace}; + +use super::utils; + + +/***** TESTS *****/ +#[cfg(test)] +mod tests { + use std::ffi::OsStr; + use std::path::PathBuf; + + use brane_ast::traversals::print::ast; + use brane_ast::{CompileResult, ParserOptions, compile_program}; + use brane_shr::utilities::{create_data_index_from, create_package_index_from, test_on_dsl_files_in}; + use humanlog::{DebugMode, HumanLogger}; + use specifications::data::DataIndex; + use specifications::package::PackageIndex; + + use super::*; + + /// Runs checks to verify the workflow inlining analysis + #[test] + fn test_checker_workflow_inline_analysis() { + // Setup logger if told + if std::env::var("TEST_LOGGER").map(|value| value == "1" || value == "true").unwrap_or(false) { + if let Err(err) = HumanLogger::terminal(DebugMode::Full).init() { + eprintln!("WARNING: Failed to setup test logger: {err} (no logging for this session)"); + } + } + + // Defines a few test files with expected inlinable functions + let tests: [(&str, &str, HashMap>>); 5] = [ + ("case1", r#"println("Hello, world!");"#, HashMap::from([(1, None)])), + ( + "case2", + r#"func hello_world() { return "Hello, world!"; } println(hello_world());"#, + HashMap::from([(1, None), (4, Some(HashSet::new()))]), + ), + ( + "case3", + r#"func foo() { return "Foo"; } func foobar() { return foo() + "Bar"; } println(foobar());"#, + HashMap::from([(1, None), (4, Some(HashSet::new())), (5, Some(HashSet::from([4])))]), + ), + ("case4", r#"import hello_world; println(hello_world());"#, HashMap::from([(1, None)])), + ( + "case5", + r#"func hello_world(n) { if (n <= 0) { return "Hello, world!"; } else { return "Hello, " + hello_world(n - 1) + "\n"; } } println(hello_world(3));"#, + HashMap::from([(1, None), (4, None)]), + ), + ]; + + // Load example package- and data indices + let tests_path: PathBuf = PathBuf::from(super::super::tests::TESTS_DIR); + let pindex: PackageIndex = create_package_index_from(tests_path.join("packages")); + let dindex: DataIndex = create_data_index_from(tests_path.join("data")); + + // Test them each + for (id, test, gold) in tests.into_iter() { + // Compile to BraneScript (we'll assume this works) + let wir: Workflow = match compile_program(test.as_bytes(), &pindex, &dindex, &ParserOptions::bscript()) { + CompileResult::Workflow(wir, _) => wir, + CompileResult::Err(errs) => { + for err in errs { + err.prettyprint(format!("<{id}>"), test); + } + panic!("Failed to compile BraneScript (see error above)"); + }, + CompileResult::Eof(err) => { + err.prettyprint(format!("<{id}>"), test); + panic!("Failed to compile BraneScript (see error above)"); + }, + + _ => { + unreachable!(); + }, + }; + // Emit the compiled workflow + println!("{}", (0..80).map(|_| '-').collect::()); + println!("Test '{id}'"); + println!(); + ast::do_traversal(&wir, std::io::stdout()).unwrap(); + println!(); + + // Analyse function calls (we'll assume this works too) + let calls: HashMap = resolve_calls(&wir, &wir.table, &mut vec![], ProgramCounter::start(), None, 
None).unwrap().0; + println!( + "Resolved functions calls: {:?}", + calls.iter().map(|(pc, func_id)| (format!("{}", pc.resolved(&wir.table)), *func_id)).collect::>() + ); + + // Analyse the inlinable funcs + let mut pred: HashMap>> = HashMap::with_capacity(calls.len()); + find_inlinable_funcs(&wir, &calls, &mut vec![], ProgramCounter::start(), None, &mut pred); + println!("Inlinable functions: {pred:?}"); + println!(); + + // Neat, done, assert it was right + assert_eq!(pred, gold); + } + } + + /// Runs the workflow inlining on the test files only + #[test] + fn test_checker_workflow_simplify() { + let tests_path: PathBuf = PathBuf::from(super::super::tests::TESTS_DIR); + + // Setup logger if told + if std::env::var("TEST_LOGGER").map(|value| value == "1" || value == "true").unwrap_or(false) { + if let Err(err) = HumanLogger::terminal(DebugMode::Full).init() { + eprintln!("WARNING: Failed to setup test logger: {err} (no logging for this session)"); + } + } + // Scope the function + let test_file: Option = std::env::var("TEST_FILE").ok(); + + // Run the compiler for every applicable DSL file + test_on_dsl_files_in("BraneScript", &tests_path, |path: PathBuf, code: String| { + // Skip if not the file we're looking for + if let Some(test_file) = &test_file { + if path.file_name().is_none() || path.file_name().unwrap().to_string_lossy() != test_file.as_str() { + return; + } + } + + // Start by the name to always know which file this is + println!("{}", (0..80).map(|_| '-').collect::()); + println!("File '{}' gave us:", path.display()); + + // Skip some files, sadly + if let Some(name) = path.file_name() { + if name == OsStr::new("class.bs") { + println!("Skipping test, since instance calling is not supported in checker workflows..."); + println!("{}\n\n", (0..80).map(|_| '-').collect::()); + return; + } + } + + // Load the package index + let pindex: PackageIndex = create_package_index_from(tests_path.join("packages")); + let dindex: DataIndex = create_data_index_from(tests_path.join("data")); + + // Compile the raw source to WIR + let wir: Workflow = match compile_program(code.as_bytes(), &pindex, &dindex, &ParserOptions::bscript()) { + CompileResult::Workflow(wir, warns) => { + // Print warnings if any + for w in warns { + w.prettyprint(path.to_string_lossy(), &code); + } + wir + }, + CompileResult::Eof(err) => { + // Print the error + err.prettyprint(path.to_string_lossy(), &code); + panic!("Failed to compile to WIR (see output above)"); + }, + CompileResult::Err(errs) => { + // Print the errors + for e in errs { + e.prettyprint(path.to_string_lossy(), &code); + } + panic!("Failed to compile to WIR (see output above)"); + }, + + _ => { + unreachable!(); + }, + }; + + // Alright preprocess it + let wir: Workflow = match simplify(wir) { + Ok((wir, _)) => wir, + Err(err) => { + panic!("Failed to preprocess WIR: {err}"); + }, + }; + + // Now print the file for prettyness + ast::do_traversal(&wir, std::io::stdout()).unwrap(); + println!("{}\n\n", (0..80).map(|_| '-').collect::()); + }); + } +} + + + + + +/***** ERRORS *****/ +/// Defines errors that may occur when preprocessing a [`Workflow`]. +#[derive(Debug)] +pub enum Error { + /// Unknown task given. + UnknownTask { id: usize }, + /// Unknown function given. + UnknownFunc { id: FunctionId }, + /// A [`Call`](ast::Edge::Call)-edge was encountered while we didn't know of a function ID on the stack. 
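+    /// This typically happens when the call target is not pushed as a direct function
+    /// constant (e.g., it is loaded from a variable or a class instance), which this
+    /// analysis deliberately does not support.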
+    CallingWithoutId { pc: ResolvedProgramCounter },
+}
+impl Display for Error {
+    fn fmt(&self, f: &mut Formatter<'_>) -> FResult {
+        use Error::*;
+        match self {
+            UnknownTask { id } => write!(f, "Encountered unknown task ID {id} in Node"),
+            UnknownFunc { id } => write!(f, "Encountered unknown function ID {id} in Call"),
+            CallingWithoutId { pc } => write!(f, "Attempted to call function at {pc} without a statically known function ID on the stack"),
+        }
+    }
+}
+impl error::Error for Error {}
+
+
+
+
+
+/***** ANALYSIS FUNCTIONS *****/
+/// Checks whether the given stream of instructions would end with a function ID on top of the stack.
+///
+/// # Arguments
+/// - `instrs`: The list of instructions to analyse.
+/// - `idx`: The index of the particular instruction (i.e., the previous one) to examine. When calling this function non-recursively, use the **last** instruction.
+///
+/// # Returns
+/// A double [`Option`] detailing the possibilities:
+/// - [`Some(Some(...))`] means that there was a function ID on top.
+/// - [`Some(None)`] means that we _know_ there is _no_ function ID on top.
+/// - [`None`] means that nothing was pushed, i.e., whatever was on top is still on top.
+fn pushes_func_id(instrs: &[EdgeInstr], idx: usize) -> Option<Option<usize>> {
+    // Pop the next instruction
+    let instr: &EdgeInstr = if idx < instrs.len() {
+        &instrs[idx]
+    } else {
+        // If we reached the last instruction, then we know no value was pushed :celebrate:
+        return None;
+    };
+
+    // Examine what it does
+    // NOTE: The BraneScript compiler only supports function calls over identifiers and projections, so we can ignore gnarly array stuff etc.!
+    // NOTE: Actually... we know very little statically about class calls in general, because they are fully pushed to dynamic land. We _could_ learn it by tracking
+    //       a variable's contents over multiple edges, but that's a lot of work; we give up and only support direct calls for now.
+    match instr {
+        // What we're looking for!
+        EdgeInstr::Function { def } => Some(Some(*def)),
+
+        // These instructions only pop, potentially (accidentally) removing our function.
+        // Just tell the caller we don't know; we don't need it for direct function calls
+        EdgeInstr::Pop {} | EdgeInstr::PopMarker {} | EdgeInstr::DynamicPop {} | EdgeInstr::VarSet { .. } => Some(None),
+
+        // Alright, some local branching; we also give up here, because we don't know which of the branches will produce the top value
+        EdgeInstr::Branch { .. } | EdgeInstr::BranchNot { .. } => Some(None),
+
+        // These instructions never pop or push anything; be conservative and report no function ID
+        EdgeInstr::VarDec { .. } | EdgeInstr::VarUndec { .. } => Some(None),
+
+        // These instructions push invalid things _for sure_
+        EdgeInstr::Cast { .. }
+        | EdgeInstr::Not {}
+        | EdgeInstr::Neg {}
+        | EdgeInstr::And {}
+        | EdgeInstr::Or {}
+        | EdgeInstr::Add {}
+        | EdgeInstr::Sub {}
+        | EdgeInstr::Mul {}
+        | EdgeInstr::Div {}
+        | EdgeInstr::Mod {}
+        | EdgeInstr::Eq {}
+        | EdgeInstr::Ne {}
+        | EdgeInstr::Lt {}
+        | EdgeInstr::Le {}
+        | EdgeInstr::Gt {}
+        | EdgeInstr::Ge {}
+        | EdgeInstr::Array { .. }
+        | EdgeInstr::ArrayIndex { .. }
+        | EdgeInstr::Instance { .. }
+        | EdgeInstr::Proj { .. }
+        | EdgeInstr::VarGet { .. }
+        | EdgeInstr::Boolean { .. }
+        | EdgeInstr::Integer { .. }
+        | EdgeInstr::Real { .. }
+        | EdgeInstr::String { .. } => Some(None),
+    }
+}
+
+/// Analyses the edges in a [`Workflow`] to resolve function calls to the ID of the functions they call.
+///
+/// # Arguments
+/// - `wir`: The [`Workflow`] to analyse.
+/// - `table`: The [`SymTable`] that determines the current types in scope.
+/// - `trace`: A stack of call pointers that keeps track of the trace of function calls. Allows us to avoid recursion.
+/// - `pc`: The program-counter-index of the edge to analyse. These are pairs of `(function, edge_idx)`, where main is referred to by [`FunctionId::Main`].
+/// - `stack_id`: The function ID currently known to be on the stack. Is [`None`] if we don't know this.
+/// - `breakpoint`: An optional program-counter-index that, if given, stops the analysis at that edge (exclusive).
+///
+/// # Returns
+/// A tuple with a [`HashMap`] that maps call indices (as program-counter-indices) to function IDs and an optional top call ID currently on the stack.
+///
+/// Note that a call may resolve to a function ID that does not map to a body (e.g., a builtin).
+///
+/// # Errors
+/// This function may error if we failed to statically discover the function IDs.
+fn resolve_calls(
+    wir: &Workflow,
+    table: &SymTable,
+    trace: &mut Vec<ProgramCounter>,
+    pc: ProgramCounter,
+    stack_id: Option<usize>,
+    breakpoint: Option<ProgramCounter>,
+) -> Result<(HashMap<ProgramCounter, usize>, Option<usize>), Error> {
+    // Quit if we're at the breakpoint
+    if let Some(breakpoint) = breakpoint {
+        if pc == breakpoint {
+            return Ok((HashMap::new(), None));
+        }
+    }
+
+    // Get the edge in the workflow
+    let edge: &Edge = match utils::get_edge(wir, pc) {
+        Some(edge) => edge,
+        None => return Ok((HashMap::new(), None)),
+    };
+
+    // Match to recursively process it
+    trace!("Attempting to resolve calls in {} ({:?})", pc.resolved(table), edge.variant());
+    match edge {
+        Edge::Node { task, next, .. } => {
+            // Attempt to discover the return type of the Node.
+            let def: &TaskDef = match table.tasks.get(*task) {
+                Some(def) => def,
+                None => return Err(Error::UnknownTask { id: *task }),
+            };
+
+            // Alright, recurse with the next instruction
+            resolve_calls(wir, table, trace, pc.jump(*next), if def.func().ret.is_void() { stack_id } else { None }, breakpoint)
+        },
+
+        Edge::Linear { instrs, next } => {
+            // Analyse the instructions to find out if we can deduce a new `stack_id`
+            let stack_id: Option<usize> = if !instrs.is_empty() { pushes_func_id(instrs, instrs.len() - 1).unwrap_or(stack_id) } else { stack_id };
+
+            // Analyse the next one
+            resolve_calls(wir, table, trace, pc.jump(*next), stack_id, breakpoint)
+        },
+
+        Edge::Stop {} => Ok((HashMap::new(), None)),
+
+        Edge::Branch { true_next, false_next, merge } => {
+            // First, analyse the branches
+            let (mut calls, mut stack_id): (HashMap<_, _>, Option<usize>) =
+                resolve_calls(wir, table, trace, pc.jump(*true_next), stack_id, merge.map(|merge| pc.jump(merge)))?;
+            if let Some(false_next) = false_next {
+                let (false_calls, false_stack) = resolve_calls(wir, table, trace, pc.jump(*false_next), stack_id, merge.map(|merge| pc.jump(merge)))?;
+                calls.extend(false_calls);
+                if stack_id != false_stack {
+                    stack_id = None;
+                }
+            }
+
+            // Analyse the remaining part next
+            if let Some(merge) = merge {
+                let (merge_calls, merge_stack) = resolve_calls(wir, table, trace, pc.jump(*merge), stack_id, breakpoint)?;
+                calls.extend(merge_calls);
+                stack_id = merge_stack;
+            }
+
+            // Alright, return the found results
+            Ok((calls, stack_id))
+        },
+
+        Edge::Parallel { branches, merge } => {
+            // Simply analyse all branches first. No need to worry about their return values and such, since that's not until the `Join`.
+            let mut calls: HashMap<_, _> = HashMap::new();
+            for branch in branches {
+                calls.extend(resolve_calls(wir, table, trace, pc.jump(*branch), stack_id, breakpoint)?.0);
+            }
+
+            // OK, then analyse the rest assuming the stack is unchanged (we can do that because the parallel's branches get clones)
+            let (new_calls, stack_id): (HashMap<_, _>, Option<usize>) = resolve_calls(wir, table, trace, pc.jump(*merge), stack_id, breakpoint)?;
+            calls.extend(new_calls);
+            Ok((calls, stack_id))
+        },
+
+        Edge::Join { merge, next } => {
+            // Simply do the next, only _not_ resetting the stack ID if no value is returned.
+            resolve_calls(wir, table, trace, pc.jump(*next), if *merge == MergeStrategy::None { stack_id } else { None }, breakpoint)
+        },
+
+        Edge::Loop { cond, body, next } => {
+            // Traverse the three individually, using the stack ID of the code body that precedes it
+            let (mut calls, mut cond_id): (HashMap<_, _>, Option<usize>) =
+                resolve_calls(wir, table, trace, pc.jump(*cond), stack_id, Some(pc.jump(*body - 1)))?;
+            let (body_calls, _): (HashMap<_, _>, Option<usize>) = resolve_calls(wir, table, trace, pc.jump(*body), cond_id, Some(pc.jump(*cond)))?;
+            calls.extend(body_calls);
+            if let Some(next) = next {
+                let (next_calls, next_id): (HashMap<_, _>, Option<usize>) = resolve_calls(wir, table, trace, pc.jump(*next), cond_id, breakpoint)?;
+                calls.extend(next_calls);
+                cond_id = next_id;
+            }
+
+            // Done!
+            Ok((calls, cond_id))
+        },
+
+        Edge::Call { input: _, result: _, next } => {
+            // Alright, time to jump functions based on the current top-of-the-stack
+            let stack_id: usize = match stack_id {
+                Some(id) => id,
+                None => {
+                    return Err(Error::CallingWithoutId { pc: pc.resolved(table) });
+                },
+            };
+
+            // We can early quit upon recursion
+            if trace.contains(&pc) {
+                let mut calls: HashMap<ProgramCounter, usize> = HashMap::from([(pc, stack_id)]);
+                let (next_calls, next_id): (HashMap<_, _>, Option<usize>) = resolve_calls(wir, table, trace, pc.jump(*next), None, breakpoint)?;
+                calls.extend(next_calls);
+                return Ok((calls, next_id));
+            }
+
+            // Add the mapping to the table
+            let mut calls: HashMap<ProgramCounter, usize> = HashMap::from([(pc, stack_id)]);
+
+            // Resolve the call of the function (builtins simply return nothing, so are implicitly handled)
+            trace.push(pc);
+            let (call_calls, call_id): (HashMap<_, _>, Option<usize>) = resolve_calls(wir, table, trace, ProgramCounter::call(stack_id), None, None)?;
+            trace.pop();
+            calls.extend(call_calls);
+
+            // Then continue with the next one
+            let (next_calls, next_id): (HashMap<_, _>, Option<usize>) = resolve_calls(wir, table, trace, pc.jump(*next), call_id, breakpoint)?;
+            calls.extend(next_calls);
+            Ok((calls, next_id))
+        },
+
+        Edge::Return { result: _ } => {
+            // If we're in the main function, this acts as an [`Elem::Stop`] with value
+            if pc.is_main() {
+                return Ok((HashMap::new(), None));
+            }
+
+            // To see whether we pass a function ID, consult the function definition
+            let def: &FunctionDef = match catch_unwind(|| table.func(pc.func_id)) {
+                Ok(def) => def,
+                Err(_) => return Err(Error::UnknownFunc { id: pc.func_id }),
+            };
+
+            // Only return the current one if the function returns void
+            if def.ret.is_void() { Ok((HashMap::new(), stack_id)) } else { Ok((HashMap::new(), None)) }
+        },
+    }
+}
+
+/// Attempts to find all non-recursive functions in the given WIR.
+///
+/// The only cases in which we don't consider a function inlinable are when the function call is:
+/// - Recursive
+/// - A builtin
+/// - Undecidable
+///
+/// # Arguments
+/// - `wir`: The input [WIR](Workflow) to analyse.
+/// - `calls`: The map of call indices to which function is actually called.
+/// - `trace`: A trace of function IDs that we've "called".
+/// - `pc`: Points to the current [`Edge`] to analyse.
+/// - `breakpoint`: If given, then analysis should stop when this PC is hit.
+/// - `inlinable`: The result we're recursively building. This set simply collects all function IDs and maps them to inlinable or not. If they are, then their ID is mapped to the set of functions on which the call depends (or else [`None`]).
+///
+/// # Returns
+/// The set of function IDs called in the analysed body. This builds a dependency tree recording the calls on which the given body depends.
+fn find_inlinable_funcs(
+    wir: &Workflow,
+    calls: &HashMap<ProgramCounter, usize>,
+    trace: &mut Vec<usize>,
+    mut pc: ProgramCounter,
+    breakpoint: Option<ProgramCounter>,
+    inlinable: &mut HashMap<usize, Option<HashSet<usize>>>,
+) -> HashSet<usize> {
+    // We shall now mix looping and recursion to lower the stack usage
+    // (Tim TopTip: If you don't, then it turns out workflows with many linear edges are too much for the default stack size)
+    let mut dependencies: HashSet<usize> = HashSet::new();
+    loop {
+        // Stop on the breakpoint
+        if let Some(breakpoint) = breakpoint {
+            if pc == breakpoint {
+                return dependencies;
+            }
+        }
+
+        // Attempt to get the edge
+        let edge: &Edge = match utils::get_edge(wir, pc) {
+            Some(edge) => edge,
+            None => return dependencies,
+        };
+
+        // Match on its kind
+        trace!("Finding inlinable functions in {} ({:?})", pc.resolved(&wir.table), edge.variant());
+        match edge {
+            Edge::Node { next, .. } | Edge::Linear { next, .. } => {
+                // Doesn't call any functions, so just proceed with the next one
+                pc.jump_mut(*next);
+            },
+
+            Edge::Stop {} => return dependencies,
+
+            Edge::Branch { true_next, false_next, merge } => {
+                // Analyse the left branch...
+                dependencies.extend(find_inlinable_funcs(wir, calls, trace, pc.jump(*true_next), merge.map(|merge| pc.jump(merge)), inlinable));
+                // ...the right branch...
+                if let Some(false_next) = false_next {
+                    dependencies.extend(find_inlinable_funcs(wir, calls, trace, pc.jump(*false_next), merge.map(|merge| pc.jump(merge)), inlinable));
+                }
+                // ...and the merge!
+                if let Some(merge) = merge {
+                    // We do the loop recursion here to avoid having to increase stack usage
+                    pc.jump_mut(*merge);
+                    continue;
+                }
+                return dependencies;
+            },
+
+            Edge::Parallel { branches, merge } => {
+                // Collect all the branches
+                for branch in branches {
+                    dependencies.extend(find_inlinable_funcs(wir, calls, trace, pc.jump(*branch), Some(pc.jump(*merge)), inlinable));
+                }
+
+                // Run the merge and we're done
+                pc.jump_mut(*merge);
+            },
+
+            Edge::Join { next, .. } => {
+                pc.jump_mut(*next);
+            },
+
+            Edge::Loop { cond, body, next } => {
+                // Traverse the condition (extending the outer set, so nothing is lost if we `continue` below)...
+                dependencies.extend(find_inlinable_funcs(wir, calls, trace, pc.jump(*cond), Some(pc.jump(*body - 1)), inlinable));
+                // ...the body...
+                dependencies.extend(find_inlinable_funcs(wir, calls, trace, pc.jump(*body), Some(pc.jump(*cond)), inlinable));
+                // ...and finally, the next step, if any
+                if let Some(next) = next {
+                    pc.jump_mut(*next);
+                    continue;
+                }
+                return dependencies;
+            },
+
+            Edge::Call { next, .. } => {
+                // OK, the exciting point!
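+                // A call can make its target uninlinable: either the target is a builtin, or we detect recursion via
+                // `trace`. Otherwise, we recurse into the callee's body exactly once to collect its dependencies.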
+
+                // Resolve the function ID we're calling
+                let func_id: usize = match calls.get(&pc) {
+                    Some(id) => *id,
+                    None => {
+                        panic!("Encountered unresolved call after running call analysis");
+                    },
+                };
+                let def: &FunctionDef = match wir.table.funcs.get(func_id) {
+                    Some(def) => def,
+                    None => panic!("Failed to get definition of function {func_id} after call analysis"),
+                };
+                dependencies.insert(func_id);
+
+                // Builtin functions are not inlinable; if so, mark it and move on
+                if BuiltinFunctions::is_builtin(&def.name) {
+                    trace!("Function {} ('{}') is not inlinable because it is a builtin", func_id, def.name);
+                    inlinable.insert(func_id, None);
+                    pc.jump_mut(*next);
+                    continue;
+                }
+
+                // Examine if this call would introduce a recursive problem
+                if trace.contains(&func_id) {
+                    // It's been in our callstack before - that means recursion!
+                    // Change our minds about its inlinability
+                    trace!("Function {} ('{}') is not inlinable because it is recursive", func_id, def.name);
+                    inlinable.insert(func_id, None);
+                    pc.jump_mut(*next);
+                    continue;
+                }
+                if inlinable.contains_key(&func_id) {
+                    // We've already seen this one! However, _don't_ change our mind about its inlinability, because it just means a repeated function call
+                    // NOTE: No need to go into the call body, as we've done this the first time we saw it
+                    trace!("Function {} ('{}') is skipped because we have seen it before", func_id, def.name);
+                    pc.jump_mut(*next);
+                    continue;
+                }
+                trace!("Function {} ('{}') is assumed inlinable until we see it is recursive", func_id, def.name);
+
+                // For now, assume that the function exists with no deps; we inject these later
+                inlinable.insert(func_id, Some(HashSet::new()));
+
+                // If we get this far, recurse into the body
+                trace.push(func_id);
+                let func_deps: HashSet<usize> = find_inlinable_funcs(wir, calls, trace, ProgramCounter::call(func_id), None, inlinable);
+                trace.pop();
+
+                // Now we can inject the entries
+                if let Some(deps) = inlinable.get_mut(&func_id).unwrap() {
+                    deps.extend(func_deps);
+                }
+
+                // Continue with the next edge in _this_ body
+                pc.jump_mut(*next);
+            },
+
+            Edge::Return { result: _ } => return dependencies,
+        }
+    }
+}
+
+/// Orders a given map of inlinable functions such that, when inlined in that order, every function will have its own calls inlined first where possible.
+///
+/// More specifically, the order ensures that functions on which other functions depend (i.e., that are called by them) are inlined first, so that they can then be inlined properly into the functions calling them.
+///
+/// # Arguments
+/// - `ordered`: The vector of ordered function IDs that is being populated. The inline order is left-to-right (i.e., the leftmost function should never have a dependency, the second-to-left can only depend on the leftmost, etc).
+/// - `inlinable`: The map of inlinable functions to their dependencies.
+/// - `next`: An iterator over the function IDs still to be considered.
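+///
+/// # Example (illustrative)
+/// If `inlinable` is `{5: Some({4}), 4: Some({}), 3: None}`, then a valid population of `ordered` is `[4, 5]`:
+/// function 4 has no dependencies and comes first, function 5 depends on 4, and function 3 is skipped because
+/// it is not inlinable. Any duplicates produced along the way are pruned afterwards by `keep_unique_first`.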
+fn order_inlinable<'i>(ordered: &mut Vec<usize>, inlinable: &HashMap<usize, Option<HashSet<usize>>>, mut next: impl Iterator<Item = &'i usize>) {
+    // Get a function to inline
+    let func_id: usize = match next.next() {
+        Some(id) => *id,
+        None => return,
+    };
+    let deps: &HashSet<usize> = match inlinable.get(&func_id).unwrap() {
+        Some(deps) => deps,
+        None => {
+            // No need to inline this one, so just continue
+            trace!("order_inlinable(): Not considering function {func_id} because it is not inlinable (deps is None)");
+            order_inlinable(ordered, inlinable, next);
+            return;
+        },
+    };
+
+    // Examine the dependencies
+    if deps.is_empty() {
+        // Base case; add to the list first, before any other
+        trace!("order_inlinable(): Function {func_id} is inlinable but has no dependencies");
+        ordered.push(func_id);
+        order_inlinable(ordered, inlinable, next);
+        trace!("order_inlinable(): New result: {ordered:?}");
+    } else {
+        // Recursive case: add all the dependencies first
+        trace!("order_inlinable(): Function {func_id} is inlinable and has dependencies");
+        order_inlinable(ordered, inlinable, deps.iter());
+        ordered.push(func_id);
+        trace!("order_inlinable(): New result: {ordered:?}");
+        order_inlinable(ordered, inlinable, next);
+    }
+}
+
+/// Given a vector, removes all duplicates from it.
+///
+/// Retains the **first** occurrences.
+///
+/// # Arguments
+/// - `data`: The vector to deduplicate.
+fn keep_unique_first(data: &mut Vec<usize>) {
+    // A buffer of seen elements
+    let mut seen: HashSet<usize> = HashSet::new();
+    data.retain(|elem| {
+        if seen.contains(elem) {
+            false
+        } else {
+            seen.insert(*elem);
+            true
+        }
+    });
+}
+
+/// Traverses the given function body and replaces all [`Edge::Return`] with an [`Edge::Linear`] pointing to the given edge index.
+///
+/// Also bumps definition pointers with the given values. This is necessary because we need to pull function scopes one layer up.
+///
+/// # Arguments
+/// - `edges`: The edges to traverse.
+/// - `calls`: The map of program counters to calls that we update with any nested calls' new positions.
+/// - `func_id`: The ID of this function.
+/// - `start_idx`: The offset to add to all next indices.
+/// - `ret_idx`: The index to point the returning linears to.
+/// - `pc`: Points to the current [`Edge`] to potentially replace.
+/// - `breakpoint`: If given, then analysis should stop when this PC is hit.
+fn prep_func_body(
+    edges: &mut [Edge],
+    calls: &mut HashMap<ProgramCounter, usize>,
+    func_id: usize,
+    start_idx: usize,
+    ret_idx: usize,
+    pc: usize,
+    breakpoint: Option<usize>,
+) {
+    // Stop on the breakpoint
+    if let Some(breakpoint) = breakpoint {
+        if pc == breakpoint {
+            return;
+        }
+    }
+    // Attempt to get the edge
+    let edge: &mut Edge = match edges.get_mut(pc) {
+        Some(edge) => edge,
+        None => return,
+    };
+
+    // Match on its kind
+    match edge {
+        Edge::Node { next, .. } | Edge::Linear { next, .. } => {
+            // Update the nexts
+            let old_next: usize = *next;
+            *next += start_idx;
+
+            // Continue traversing
+            prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_next, breakpoint);
+        },
+
+        Edge::Stop {} => (),
+
+        Edge::Branch { true_next, false_next, merge } => {
+            let (old_true_next, old_false_next, old_merge): (usize, Option<usize>, Option<usize>) = (*true_next, *false_next, *merge);
+
+            // Update the nexts
+            *true_next += start_idx;
+            if let Some(false_next) = false_next {
+                *false_next += start_idx;
+            }
+            if let Some(merge) = merge {
+                *merge += start_idx;
+            }
+
+            // Analyse the left branch...
+            prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_true_next, old_merge);
+            // ...the right branch...
+            if let Some(old_false_next) = old_false_next {
+                prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_false_next, old_merge);
+            }
+            // ...and the merge!
+            if let Some(old_merge) = old_merge {
+                prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_merge, breakpoint);
+            }
+        },
+
+        Edge::Parallel { branches, merge } => {
+            let (old_branches, old_merge): (Vec<usize>, usize) = (branches.clone(), *merge);
+
+            // Update the nexts
+            for branch in branches {
+                *branch += start_idx;
+            }
+            *merge += start_idx;
+
+            // Collect all the branches
+            for old_branch in old_branches {
+                prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_branch, Some(old_merge));
+            }
+
+            // Run the merge and we're done
+            prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_merge, breakpoint);
+        },
+
+        Edge::Join { next, .. } => {
+            let old_next: usize = *next;
+            *next += start_idx;
+            prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_next, breakpoint);
+        },
+
+        Edge::Loop { cond, body: lbody, next } => {
+            let (old_cond, old_lbody, old_next): (usize, usize, Option<usize>) = (*cond, *lbody, *next);
+
+            // Update the nexts
+            *cond += start_idx;
+            *lbody += start_idx;
+            if let Some(next) = next {
+                *next += start_idx;
+            }
+
+            // Traverse the condition...
+            prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_cond, Some(old_lbody - 1));
+            // ...the body...
+            prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_lbody, Some(old_cond));
+            // ...and finally, the next step, if any
+            if let Some(old_next) = old_next {
+                prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_next, breakpoint);
+            }
+        },
+
+        Edge::Call { next, .. } => {
+            let old_next: usize = *next;
+
+            // Update the next
+            *next += start_idx;
+
+            // Update the call list with this call's new position
+            calls.insert(
+                ProgramCounter::new(FunctionId::Main, start_idx + pc),
+                *calls.get(&ProgramCounter::new(func_id, pc)).unwrap_or_else(|| panic!("Encountered unresolved call after call ID analysis")),
+            );
+
+            // Prepare the remainder
+            prep_func_body(edges, calls, func_id, start_idx, ret_idx, old_next, breakpoint);
+        },
+
+        Edge::Return { result: _ } => {
+            // Yank it
+            trace!("Yanking return edge at '{pc}' with a linear edge to '{ret_idx}'");
+            *edge = Edge::Linear { instrs: vec![], next: ret_idx };
+        },
+    }
+}
+
+/// Inlines the given set of functions in the given WIR function body.
+///
+/// Note that this is a rather confusing operation space-wise. To prevent program counter pointers from becoming invalid, we simply replace the call with an empty [`Edge::Linear`] that connects to the body appended at the end of the stream. Then, the body connects back to the call's old `next`.
+///
+/// # Arguments
+/// - `body`: A [WIR](Workflow) function body to inline functions _in_.
+/// - `calls`: The map of call indices to which function is actually called.
+/// - `funcs`: A map of function IDs to function bodies ready to be substituted into the `body`.
+/// - `inlinable`: A collection that determines whether functions are inlinable. If the set of `deps` is [`Some`], the function is inlinable; otherwise it's not.
+/// - `func_id`: The ID of the function we're inlining in.
+/// - `pc`: Points to the current [`Edge`] to analyse.
+/// - `breakpoint`: If given, then analysis should stop when this PC is hit.
+// It's a compiler function, too many arguments are kinda its thing :P No, it's not worth it to come up with structs for this.
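+//
+// A sketch of the splice this performs (illustrative): a call at index `i` in a body of length `L` becomes
+//     body[i] = Linear { instrs: [Pop], next: L }
+// and the callee's body is appended starting at index `L` (with all of its next-indices bumped by `L`), while
+// `prep_func_body` rewrites its `Return` edges into `Linear { instrs: [], next: <the call's old next> }`.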
+#[allow(clippy::too_many_arguments)]
+fn inline_funcs_in_body(
+    body: &mut Vec<Edge>,
+    calls: &mut HashMap<ProgramCounter, usize>,
+    funcs: &HashMap<usize, Vec<Edge>>,
+    inlinable: &HashMap<usize, Option<HashSet<usize>>>,
+    func_id: FunctionId,
+    pc: usize,
+    breakpoint: Option<usize>,
+) {
+    // Stop on the breakpoint
+    if let Some(breakpoint) = breakpoint {
+        if pc == breakpoint {
+            return;
+        }
+    }
+    // Attempt to get the edge
+    let body_len: usize = body.len();
+    let edge: &mut Edge = match body.get_mut(pc) {
+        Some(edge) => edge,
+        None => return,
+    };
+
+    // Match on its kind
+    match edge {
+        Edge::Node { next, .. } | Edge::Linear { next, .. } => {
+            let next: usize = *next;
+            inline_funcs_in_body(body, calls, funcs, inlinable, func_id, next, breakpoint)
+        },
+
+        Edge::Stop {} => (),
+
+        Edge::Branch { true_next, false_next, merge } => {
+            let (true_next, false_next, merge): (usize, Option<usize>, Option<usize>) = (*true_next, *false_next, *merge);
+
+            // Analyse the left branch...
+            inline_funcs_in_body(body, calls, funcs, inlinable, func_id, true_next, merge);
+            // ...the right branch...
+            if let Some(false_next) = false_next {
+                inline_funcs_in_body(body, calls, funcs, inlinable, func_id, false_next, merge)
+            }
+            // ...and the merge!
+            if let Some(merge) = merge {
+                inline_funcs_in_body(body, calls, funcs, inlinable, func_id, merge, breakpoint)
+            }
+        },
+
+        Edge::Parallel { branches, merge } => {
+            let (branches, merge): (Vec<usize>, usize) = (branches.clone(), *merge);
+
+            // Collect all the branches
+            for branch in branches {
+                inline_funcs_in_body(body, calls, funcs, inlinable, func_id, branch, Some(merge));
+            }
+
+            // Run the merge and we're done
+            inline_funcs_in_body(body, calls, funcs, inlinable, func_id, merge, breakpoint);
+        },
+
+        Edge::Join { next, .. } => {
+            let next: usize = *next;
+            inline_funcs_in_body(body, calls, funcs, inlinable, func_id, next, breakpoint)
+        },
+
+        Edge::Loop { cond, body: lbody, next } => {
+            let (cond, lbody, next): (usize, usize, Option<usize>) = (*cond, *lbody, *next);
+
+            // Traverse the condition...
+            inline_funcs_in_body(body, calls, funcs, inlinable, func_id, cond, Some(lbody - 1));
+            // ...the body...
+            inline_funcs_in_body(body, calls, funcs, inlinable, func_id, lbody, Some(cond));
+            // ...and finally, the next step, if any
+            if let Some(next) = next {
+                inline_funcs_in_body(body, calls, funcs, inlinable, func_id, next, breakpoint);
+            }
+        },
+
+        Edge::Call { next, .. } => {
+            let next: usize = *next;
+
+            // Resolve the function ID we're calling
+            let call_id: usize = match calls.get(&ProgramCounter::new(func_id, pc)) {
+                Some(id) => *id,
+                None => {
+                    panic!("Encountered unresolved call after running inline analysis");
+                },
+            };
+
+            // Assert this is an inlinable function (and not external)
+            if inlinable.get(&call_id).map(|deps| deps.is_none()).unwrap_or(true) {
+                // Not inlinable; simply continue with the next edge
+                trace!("Not inlining function call to function {call_id} at {pc}");
+                inline_funcs_in_body(body, calls, funcs, inlinable, func_id, next, breakpoint);
+                return;
+            }
+            trace!("Inlining function call to function {call_id} at {pc}");
+
+            // Otherwise, replace the call with a linear edge that refers to the inlined body instead (we'll put that body after all the other edges to avoid moving them)
+            // Note: we insert a pop to consume the function reference pushed on the stack to execute the call
+            *edge = Edge::Linear { instrs: vec![EdgeInstr::Pop {}], next: body_len };
+
+            // Prepare the call body by replacing returns with normal links and by bumping all definitions
+            let mut call_body: Vec<Edge> = funcs
+                .get(&call_id)
+                .unwrap_or_else(|| {
+                    panic!("Encountered function ID '{call_id}' without function body after inline analysis (might be an uninlined dependency)")
+                })
+                .clone();
+            prep_func_body(&mut call_body, calls, call_id, body_len, next, 0, None);
+
+            // Append it to the main body and the inlining is complete
+            body.extend(call_body);
+
+            // End with the next edges
+            inline_funcs_in_body(body, calls, funcs, inlinable, func_id, next, breakpoint);
+        },
+
+        Edge::Return { result: _ } => (),
+    }
+}
+
+
+
+
+
+/***** SIMPLIFICATION FUNCTIONS *****/
+/// Attempts to inline functions in the WIR as much as possible.
+///
+/// The only cases in which we don't are when the function call is:
+/// - Recursive
+/// - A builtin
+/// - Undecidable
+///
+/// # Arguments
+/// - `wir`: The input [WIR](Workflow) to simplify.
+/// - `calls`: The map of call indices to which function is actually called.
+///
+/// # Returns
+/// The same `wir` as given, but then optimized.
+///
+/// # Panics
+/// This function may panic if the input workflow is incoherent (e.g., contains unresolved calls).
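+///
+/// # Example (illustrative)
+/// ```ignore
+/// // Mirrors how `simplify` drives this function:
+/// let (mut calls, _) = resolve_calls(&wir, &wir.table, &mut vec![], ProgramCounter::start(), None, None)?;
+/// let wir: Workflow = inline_functions(wir, &mut calls);
+/// ```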
+pub fn inline_functions(mut wir: Workflow, calls: &mut HashMap<ProgramCounter, usize>) -> Workflow {
+    // Analyse which functions in the WIR are non-recursive
+    let mut inlinable: HashMap<usize, Option<HashSet<usize>>> = HashMap::with_capacity(calls.len());
+    find_inlinable_funcs(&wir, calls, &mut vec![], ProgramCounter::start(), None, &mut inlinable);
+    debug!(
+        "Inlinable functions: {}",
+        inlinable
+            .iter()
+            .filter_map(|(id, deps)| if let Some(deps) = deps {
+                Some(format!(
+                    "'{}' (depends on {})",
+                    wir.table.funcs.get(*id).map(|def| def.name.as_str()).unwrap_or("???"),
+                    deps.iter()
+                        .map(|id| format!("'{}'", wir.table.funcs.get(*id).map(|def| def.name.as_str()).unwrap_or("???")))
+                        .collect::<Vec<String>>()
+                        .join(", "),
+                ))
+            } else {
+                None
+            })
+            .collect::<Vec<String>>()
+            .join(", ")
+    );
+
+    // Order them so that we satisfy function dependencies
+    let mut inline_order: Vec<usize> = Vec::with_capacity(inlinable.len());
+    order_inlinable(&mut inline_order, &inlinable, inlinable.keys());
+    keep_unique_first(&mut inline_order);
+    debug!(
+        "Inline order: {}",
+        inline_order
+            .iter()
+            .map(|id| format!("'{}'", wir.table.funcs.get(*id).map(|def| def.name.as_str()).unwrap_or("???"),))
+            .collect::<Vec<String>>()
+            .join(", ")
+    );
+
+    {
+        // Tear open the Workflow to satisfy the borrow checker
+        let Workflow { id: _, graph: wir_graph, metadata: _, funcs: wir_funcs, table: wir_table, user: _ } = &mut wir;
+
+        // Extract the graph behind the Arc
+        let mut graph: Arc<Vec<Edge>> = Arc::new(vec![]);
+        std::mem::swap(&mut graph, wir_graph);
+        let mut graph: Vec<Edge> = Arc::into_inner(graph).unwrap();
+        // Extract the functions behind the Arc
+        let mut funcs: Arc<HashMap<usize, Vec<Edge>>> = Arc::new(HashMap::new());
+        std::mem::swap(&mut funcs, wir_funcs);
+        let mut funcs: HashMap<usize, Vec<Edge>> = Arc::into_inner(funcs).unwrap();
+        // Extract the WIR table
+        let mut table: Arc<SymTable> = Arc::new(SymTable::new());
+        std::mem::swap(&mut table, wir_table);
+        let table: SymTable = Arc::into_inner(table).unwrap();
+
+        // Inline non-main function bodies first
+        let mut new_funcs: HashMap<usize, Vec<Edge>> = HashMap::new();
+        for id in inline_order {
+            // Acquire the body
+            let mut new_body: Vec<Edge> = funcs.get(&id).unwrap().clone();
+
+            // Inline the functions in this body
+            debug!("Inlining functions in function {id}");
+            inline_funcs_in_body(&mut new_body, calls, &new_funcs, &inlinable, FunctionId::Func(id), 0, None);
+            new_funcs.insert(id, new_body);
+        }
+        funcs = new_funcs;
+
+        // Now inline the main with all function bodies inlined correctly
+        debug!("Inlining functions in main");
+        inline_funcs_in_body(&mut graph, calls, &funcs, &inlinable, FunctionId::Main, 0, None);
+
+        // Write the functions and graphs back
+        let mut table: Arc<SymTable> = Arc::new(table);
+        std::mem::swap(wir_table, &mut table);
+        let mut funcs: Arc<HashMap<usize, Vec<Edge>>> = Arc::new(funcs);
+        std::mem::swap(wir_funcs, &mut funcs);
+        let mut graph: Arc<Vec<Edge>> = Arc::new(graph);
+        std::mem::swap(wir_graph, &mut graph);
+    }
+
+    // OK, we did all we could
+    wir
+}
+
+
+
+
+
+/***** LIBRARY *****/
+/// Simplifies the given WIR-workflow as much as possible to increase compatibility with checker workflows.
+///
+/// Most importantly, it:
+/// - Attempts to inline functions as long as they're non-recursive (since functions are not supported)
+///
+/// # Arguments
+/// - `wir`: The input [WIR](Workflow) to simplify.
+///
+/// # Returns
+/// A tuple of the same `wir` as given, but then optimized, and a mapping of (remaining) [`Edge::Call`]s to the functions they actually call.
+///
+/// # Errors
+/// This function may error if the input workflow is incoherent.
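+///
+/// # Example (illustrative; `wir` is assumed to be a compiled [`Workflow`])
+/// ```ignore
+/// let (wir, calls): (Workflow, HashMap<ProgramCounter, usize>) = simplify(wir)?;
+/// // `calls` now maps every remaining Edge::Call to the ID of the function it invokes.
+/// ```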
+pub fn simplify(mut wir: Workflow) -> Result<(Workflow, HashMap<ProgramCounter, usize>), Error> {
+    // Analyse call dependencies first
+    let (mut calls, _): (HashMap<ProgramCounter, usize>, _) = resolve_calls(&wir, &wir.table, &mut vec![], ProgramCounter::start(), None, None)?;
+    debug!("Resolved calls as: {:?}", calls.iter().map(|(pc, id)| (format!("{}", pc.resolved(&wir.table)), *id)).collect::<HashMap<_, _>>());
+
+    // Simplify functions as much as possible
+    wir = inline_functions(wir, &mut calls);
+
+    // Done!
+    if tracing::level_filters::STATIC_MAX_LEVEL >= Level::DEBUG {
+        let mut buf: Vec<u8> = vec![];
+        brane_ast::traversals::print::ast::do_traversal(&wir, &mut buf).unwrap();
+        debug!("Simplified workflow:\n\n{}\n", String::from_utf8_lossy(&buf));
+    }
+    Ok((wir, calls))
+}
diff --git a/brane-chk/src/workflow/tests.rs b/brane-chk/src/workflow/tests.rs
new file mode 100644
index 00000000..d805caea
--- /dev/null
+++ b/brane-chk/src/workflow/tests.rs
@@ -0,0 +1,210 @@
+// TESTS.rs
+//   by Lut99
+//
+// Created:
+//   18 Oct 2024, 11:08:50
+// Last edited:
+//   29 Apr 2025, 13:40:32
+// Auto updated?
+//   Yes
+//
+// Description:
+//!   Implements tests for the [`Workflow`](super::spec::Workflow) (or
+//!   rather, its compiler(s)).
+//
+
+use std::ffi::OsStr;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use brane_ast::{CompileResult, ParserOptions, compile_program};
+use brane_shr::utilities::{create_data_index_from, create_package_index_from, test_on_dsl_files_in};
+use policy_reasoner::workflow::Workflow;
+use specifications::data::DataIndex;
+use specifications::package::PackageIndex;
+use specifications::wir as ast;
+use tracing::{Level, debug};
+
+use crate::workflow::compile::compile;
+
+
+/***** CONSTANTS *****/
+/// Defines the location of the tests
+pub(crate) const TESTS_DIR: &str = "../../../tests";
+
+
+
+
+
+/***** HELPER FUNCTIONS *****/
+/// Injects some (random) data in a workflow to simulate required information from the Brane runtime.
+///
+/// Specifically, injects:
+/// - The end user of the workflow.
+///
+/// # Arguments
+/// - `wir`: A (mutable reference to a) BraneScript [`Workflow`](ast::Workflow).
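+///
+/// Note: the user is drawn with `names::three::rand()`, so the injected name differs from run to run.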
+fn prepare_workflow(wir: &mut ast::Workflow) {
+    // Inject the user with a random name
+    wir.user = Arc::new(Some(names::three::rand().into()));
+}
+
+
+
+
+
+/***** LIBRARY *****/
+/// Run all the BraneScript tests
+#[test]
+fn test_checker_workflow_unoptimized() {
+    let tests_path: PathBuf = PathBuf::from(TESTS_DIR);
+
+    // Run the compiler for every applicable DSL file
+    test_on_dsl_files_in("BraneScript", &tests_path, |path: PathBuf, code: String| {
+        // Start with the name so we always know which file this is
+        println!("{}", (0..80).map(|_| '-').collect::<String>());
+        println!("File '{}' gave us:", path.display());
+
+        // Skip some files, sadly
+        if let Some(name) = path.file_name() {
+            if name == OsStr::new("class.bs") {
+                println!("Skipping test, since instance calling is not supported in checker workflows...");
+                println!("{}\n\n", (0..80).map(|_| '-').collect::<String>());
+                return;
+            }
+        }
+
+        // Load the package index
+        let pindex: PackageIndex = create_package_index_from(tests_path.join("packages"));
+        let dindex: DataIndex = create_data_index_from(tests_path.join("data"));
+
+        // Compile the raw source to WIR
+        let mut wir: ast::Workflow = match compile_program(code.as_bytes(), &pindex, &dindex, &ParserOptions::bscript()) {
+            CompileResult::Workflow(wir, warns) => {
+                // Print warnings if any
+                for w in warns {
+                    w.prettyprint(path.to_string_lossy(), &code);
+                }
+                wir
+            },
+            CompileResult::Eof(err) => {
+                // Print the error
+                err.prettyprint(path.to_string_lossy(), &code);
+                panic!("Failed to compile to WIR (see output above)");
+            },
+            CompileResult::Err(errs) => {
+                // Print the errors
+                for e in errs {
+                    e.prettyprint(path.to_string_lossy(), &code);
+                }
+                panic!("Failed to compile to WIR (see output above)");
+            },
+
+            _ => {
+                unreachable!();
+            },
+        };
+
+        // Insert some additional content
+        prepare_workflow(&mut wir);
+
+        // Print the WIR in debug mode
+        if tracing::level_filters::STATIC_MAX_LEVEL >= Level::DEBUG {
+            // Write the processed graph
+            let mut buf: Vec<u8> = vec![];
+            brane_ast::traversals::print::ast::do_traversal(&wir, &mut buf).unwrap();
+            debug!("Compiled workflow:\n\n{}\n", String::from_utf8_lossy(&buf));
+        }
+
+        // Next, compile to the checker's workflow
+        let wf: Workflow = match compile(wir) {
+            Ok(wf) => wf,
+            Err(err) => {
+                panic!("Failed to compile WIR to CheckerWorkflow: {err}");
+            },
+        };
+
+        // Now print the file for prettiness
+        println!("{}", wf.visualize());
+        println!("{}\n\n", (0..80).map(|_| '-').collect::<String>());
+    });
+}
+
+/// Run all the BraneScript tests _with_ optimization
+#[test]
+fn test_checker_workflow_optimized() {
+    let tests_path: PathBuf = PathBuf::from(TESTS_DIR);
+
+    // Run the compiler for every applicable DSL file
+    test_on_dsl_files_in("BraneScript", &tests_path, |path: PathBuf, code: String| {
+        // Start with the name so we always know which file this is
+        println!("{}", (0..80).map(|_| '-').collect::<String>());
+        println!("(Optimized) File '{}' gave us:", path.display());
+
+        // Skip some files, sadly
+        if let Some(name) = path.file_name() {
+            if name == OsStr::new("class.bs") {
+                println!("Skipping test, since instance calling is not supported in checker workflows...");
+                println!("{}\n\n", (0..80).map(|_| '-').collect::<String>());
+                return;
+            }
+        }
+
+        // Load the package index
+        let pindex: PackageIndex = create_package_index_from(tests_path.join("packages"));
+        let dindex: DataIndex = create_data_index_from(tests_path.join("data"));
+
+        // Compile the raw source to WIR
+        let mut wir: ast::Workflow = match compile_program(code.as_bytes(), &pindex, &dindex, &ParserOptions::bscript()) {
+            CompileResult::Workflow(wir, warns) => {
+                // Print warnings if any
+                for w in warns {
+                    w.prettyprint(path.to_string_lossy(), &code);
+                }
+                wir
+            },
+            CompileResult::Eof(err) => {
+                // Print the error
+                err.prettyprint(path.to_string_lossy(), &code);
+                panic!("Failed to compile to WIR (see output above)");
+            },
+            CompileResult::Err(errs) => {
+                // Print the errors
+                for e in errs {
+                    e.prettyprint(path.to_string_lossy(), &code);
+                }
+                panic!("Failed to compile to WIR (see output above)");
+            },
+
+            _ => {
+                unreachable!();
+            },
+        };
+
+        // Insert some additional content
+        prepare_workflow(&mut wir);
+
+        // Print the WIR in debug mode
+        if tracing::level_filters::STATIC_MAX_LEVEL >= Level::DEBUG {
+            // Write the processed graph
+            let mut buf: Vec<u8> = vec![];
+            brane_ast::traversals::print::ast::do_traversal(&wir, &mut buf).unwrap();
+            debug!("Compiled workflow:\n\n{}\n", String::from_utf8_lossy(&buf));
+        }
+
+        // Next, compile to the checker's workflow
+        let mut wf: Workflow = match compile(wir) {
+            Ok(wf) => wf,
+            Err(err) => {
+                panic!("Failed to compile WIR to CheckerWorkflow: {err}");
+            },
+        };
+
+        // Slide in that optimization
+        wf.optimize();
+
+        // Now print the file for prettiness
+        println!("{}", wf.visualize());
+        println!("{}\n\n", (0..80).map(|_| '-').collect::<String>());
+    });
+}
diff --git a/brane-chk/src/workflow/utils.rs b/brane-chk/src/workflow/utils.rs
new file mode 100644
index 00000000..16b591fc
--- /dev/null
+++ b/brane-chk/src/workflow/utils.rs
@@ -0,0 +1,30 @@
+// UTILS.rs
+//   by Lut99
+//
+// Created:
+//   18 Oct 2024, 11:13:13
+// Last edited:
+//   29 Apr 2025, 13:40:43
+// Auto updated?
+//   Yes
+//
+// Description:
+//!   Defines a few utilities used across the compilation modules.
+//
+
+use specifications::pc::ProgramCounter;
+
+
+/***** LIBRARY FUNCTIONS *****/
+/// Gets a workflow edge from a PC.
+///
+/// # Arguments
+/// - `wir`: The [`Workflow`](specifications::wir::Workflow) to get the edge from.
+/// - `pc`: The program counter that points to the edge (hopefully).
+///
+/// # Returns
+/// The edge the `pc` pointed to, or [`None`] if it was out-of-bounds.
+#[inline]
+pub fn get_edge(wir: &specifications::wir::Workflow, pc: ProgramCounter) -> Option<&specifications::wir::Edge> {
+    if pc.func_id.is_main() { wir.graph.get(pc.edge_idx) } else { wir.funcs.get(&pc.func_id.id()).and_then(|edges| edges.get(pc.edge_idx)) }
+}
diff --git a/brane-cli-c/Cargo.toml b/brane-cli-c/Cargo.toml
index 9d2c8683..484c6872 100644
--- a/brane-cli-c/Cargo.toml
+++ b/brane-cli-c/Cargo.toml
@@ -21,7 +21,7 @@ humanlog.workspace = true
 libc = "0.2.154"
 log = "0.4.22"
 parking_lot = "0.12.1"
-tokio = "1.38.0"
+tokio = "1.42.0"
 
 brane-ast = { path = "../brane-ast" }
 brane-cli = { path = "../brane-cli" }
diff --git a/brane-cli-c/src/lib.rs b/brane-cli-c/src/lib.rs
index e67f1c23..e82c9418 100644
--- a/brane-cli-c/src/lib.rs
+++ b/brane-cli-c/src/lib.rs
@@ -4,7 +4,7 @@
 // Created:
 //   14 Jun 2023, 17:38:09
 // Last edited:
-//   04 Mar 2024, 13:33:55
+//   02 May 2025, 15:05:31
 // Auto updated?
// Yes // @@ -27,7 +27,6 @@ use std::rc::Rc; use std::sync::{Arc, Once}; use std::time::Instant; -use brane_ast::ast::Workflow; use brane_ast::state::CompileState; use brane_ast::traversals::print::ast; use brane_ast::{CompileResult, Error as AstError, ParserOptions, Warning as AstWarning}; @@ -40,6 +39,7 @@ use log::{debug, error, info, trace}; use parking_lot::{Mutex, MutexGuard}; use specifications::data::DataIndex; use specifications::package::PackageIndex; +use specifications::wir::Workflow; use tokio::runtime::{Builder, Runtime}; @@ -1174,8 +1174,8 @@ pub unsafe extern "C" fn fvalue_serialize(fvalue: *const FullValue, data_dir: *c // Serialize the result only if there is anything to serialize let mut sfvalue: String = String::new(); - if fvalue != &FullValue::Void { - writeln!(&mut sfvalue, "\nWorkflow returned value {}", style(format!("'{fvalue}'")).bold().cyan()).unwrap(); + if !matches!(fvalue, FullValue::Void) { + writeln!(&mut sfvalue, "\nWorkflow returned value {}", style(format!("'{}'", fvalue)).bold().cyan()).unwrap(); // Treat some values special match fvalue { @@ -1342,7 +1342,7 @@ pub unsafe extern "C" fn vm_free(vm: *mut VirtualMachine) { /// - `vm`: The [`VirtualMachine`] that we execute with. This determines which backend to use. /// - `workflow`: The compiled workflow to execute. /// - `prints`: A newly allocated string which represents any stdout- or stderr prints done during workflow execution. Will be [`NULL`] if there is an error (see below). -/// - `result`: A [`FullValue`] which represents the return value of the workflow. Will be [`NULL`] if there is an error (see below). +/// - `result`: A [`FullValue`] which represents the return value of the workflow, and a [`ProgramCounter`] that denotes which instruction produced it (or [`None`] if the workflow was empty). Will be [`NULL`] if there is an error (see below). /// /// # Returns /// An [`Error`]-struct that contains the error occurred, or [`NULL`] otherwise. diff --git a/brane-cli/Cargo.toml b/brane-cli/Cargo.toml index 94a0b116..a14a5d7f 100644 --- a/brane-cli/Cargo.toml +++ b/brane-cli/Cargo.toml @@ -39,7 +39,7 @@ log = "0.4.22" names.workspace = true parking_lot = "0.12.1" path-clean = "1.0.0" -prettytable-rs = "0.10.0" +prettytable = "0.10.0" rand = "0.9.0" rustls = "0.21.6" rustyline = "15.0.0" diff --git a/brane-cli/src/check.rs b/brane-cli/src/check.rs index 676a3edc..8dbac0cb 100644 --- a/brane-cli/src/check.rs +++ b/brane-cli/src/check.rs @@ -4,7 +4,7 @@ // Created: // 02 Feb 2024, 11:08:20 // Last edited: -// 08 Feb 2024, 17:18:29 +// 14 Nov 2024, 17:58:12 // Auto updated? // Yes // @@ -17,7 +17,7 @@ use std::io::Read; use std::sync::Arc; use std::{fs, io}; -use brane_ast::{CompileResult, Workflow}; +use brane_ast::CompileResult; use brane_dsl::{Language, ParserOptions}; use console::style; use error_trace::trace; @@ -26,6 +26,7 @@ use specifications::data::DataIndex; use specifications::driving::{CheckReply, CheckRequest, DriverServiceClient}; use specifications::package::PackageIndex; use specifications::profiling::{self}; +use specifications::wir::Workflow; pub use crate::errors::CheckError as Error; use crate::instance::InstanceInfo; diff --git a/brane-cli/src/data.rs b/brane-cli/src/data.rs index 8a192e86..a6312b02 100644 --- a/brane-cli/src/data.rs +++ b/brane-cli/src/data.rs @@ -4,7 +4,7 @@ // Created: // 12 Sep 2022, 17:39:06 // Last edited: -// 26 Jul 2023, 09:36:57 +// 02 May 2025, 15:03:01 // Auto updated? 
// Yes // @@ -19,8 +19,6 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time::Duration; -use brane_ast::Workflow; -use brane_ast::ast::Edge; use brane_shr::fs::copy_dir_recursively_async; use brane_shr::utilities::is_ip_addr; use brane_tsk::spec::LOCALHOST; @@ -36,6 +34,7 @@ use reqwest::tls::{Certificate, Identity}; use reqwest::{Client, ClientBuilder, Proxy}; use specifications::data::{AccessKind, AssetInfo, DataIndex, DataInfo, DataName}; use specifications::registering::DownloadAssetRequest; +use specifications::wir::{Edge, Workflow}; use tempfile::TempDir; use tokio::fs as tfs; use tokio::io::AsyncWriteExt; @@ -57,6 +56,7 @@ use crate::utils::{ensure_dataset_dir, ensure_datasets_dir, get_dataset_dir}; /// - `certs_dir`: The directory where certificates are stored. Expected to contain nested directories that store the certs by domain ID. /// - `data_dir`: The directory to download the dataset to. /// - `name`: The name of the dataset to download. +/// - `workflow`: A workflow for which we're downloading a result. /// - `access`: The locations where it is available. /// /// # Returns @@ -89,7 +89,7 @@ pub async fn download_data( let location: &str = access.keys().choose(&mut rng).unwrap(); // Send a GET-request to resolve that location to a delegate - let registry_addr = format!("{api_endpoint}/infra/registries/{location}"); + let registry_addr = format!("http://{api_endpoint}/infra/registries/{location}"); let res = reqwest::get(®istry_addr).await.map_err(|source| DataError::RequestError { what: "registry", address: registry_addr.clone(), source })?; @@ -144,7 +144,7 @@ pub async fn download_data( } /* Step 4: Build the client. */ - let download_addr: String = format!("{registry_addr}/data/download/{name}"); + let download_addr: String = format!("https://{registry_addr}/data/download/{name}"); debug!("Sending download request to '{}'...", download_addr); let mut client: ClientBuilder = Client::builder().use_rustls_tls().add_root_certificate(ca_cert).identity(identity).tls_sni(!is_ip_addr(&download_addr)); @@ -335,7 +335,7 @@ pub async fn download( let instance_info: InstanceInfo = InstanceInfo::from_active_path().map_err(|source| DataError::InstanceInfoError { source })?; // Fetch a new, remote DataIndex to get up-to-date entries - let data_addr: String = format!("{}/data/info", instance_info.api); + let data_addr: String = format!("http://{}/data/info", instance_info.api); let index: DataIndex = brane_tsk::api::get_data_index(&data_addr).await.map_err(|source| DataError::RemoteDataIndexError { address: data_addr, source })?; diff --git a/brane-cli/src/errors.rs b/brane-cli/src/errors.rs index 1e2af176..628205c1 100644 --- a/brane-cli/src/errors.rs +++ b/brane-cli/src/errors.rs @@ -4,7 +4,7 @@ // Created: // 17 Feb 2022, 10:27:28 // Last edited: -// 07 Mar 2024, 14:16:08 +// 02 May 2025, 14:16:13 // Auto updated? // Yes // @@ -15,6 +15,7 @@ use std::error::Error; use std::path::PathBuf; +use brane_exe::value::DataId; use brane_shr::formatters::{BlockFormatter, PrettyListFormatter}; use reqwest::StatusCode; use specifications::address::Address; @@ -572,7 +573,7 @@ pub enum InstanceError { IllegalInstanceName { raw: String, illegal_char: char }, /// Failed to parse an address from the hostname (and a little modification). 
#[error("Failed to convert hostname to a valid address")] - AddressParseError { source: specifications::address::AddressError }, + AddressParseError { source: specifications::address::AddressParseError }, /// Failed to send a request to the remote instance. #[error( "Failed to send request to the instance API at '{address}' (if this is something on your end, you may skip this check by providing \ @@ -867,6 +868,9 @@ pub enum RunError { /// Failed to download remote dataset. #[error("Failed to download remote dataset")] DataDownloadError { source: DataError }, + /// Workflow returned result without PC. + #[error("Workflow returned data \"{data}\" without providing information where in the workflow it's downloaded from.")] + DataDownloadWithoutPc { data: DataId }, /// Failed to read the source from stdin #[error("Failed to read source from stdin")] diff --git a/brane-cli/src/instance.rs b/brane-cli/src/instance.rs index 37858769..a2d4abc9 100644 --- a/brane-cli/src/instance.rs +++ b/brane-cli/src/instance.rs @@ -4,7 +4,7 @@ // Created: // 26 Jan 2023, 09:22:13 // Last edited: -// 08 Jan 2024, 10:43:17 +// 29 Apr 2025, 14:00:25 // Auto updated? // Yes // @@ -313,10 +313,8 @@ pub async fn add( // Convert the hostname and ports to Addresses // Note we do it a bit impractically, but that's to parse the hostname correctly in case it's an IP address. debug!("Parsing hostname..."); - let api: Address = - Address::from_str(&format!("http://{}:{}", hostname.hostname, api_port)).map_err(|source| Error::AddressParseError { source })?; - let drv: Address = - Address::from_str(&format!("grpc://{}:{}", hostname.hostname, drv_port)).map_err(|source| Error::AddressParseError { source })?; + let api: Address = Address::from_str(&format!("{}:{}", hostname.hostname, api_port)).map_err(|source| Error::AddressParseError { source })?; + let drv: Address = Address::from_str(&format!("{}:{}", hostname.hostname, drv_port)).map_err(|source| Error::AddressParseError { source })?; // Warn the user to let them know an alternative is available if it is an IP if name == hostname.hostname && api.is_ip() { @@ -328,7 +326,7 @@ pub async fn add( debug!("Checking instance reachability..."); // Do a simple HTTP call to the health - let health_addr: String = format!("{api}/health"); + let health_addr: String = format!("http://{api}/health"); let res: reqwest::Response = reqwest::get(&health_addr).await.map_err(|source| Error::RequestError { address: health_addr.clone(), source })?; @@ -531,7 +529,7 @@ pub async fn list(show_status: bool) -> Result<(), Error> { // Get the status let status: String = 'reach: { // Do a simple HTTP call to the health and see where we fail - let health_addr: String = format!("{api_addr}/health"); + let health_addr: String = format!("http://{api_addr}/health"); let res: reqwest::Response = match reqwest::get(&health_addr).await { Ok(res) => res, Err(_) => { @@ -647,16 +645,16 @@ pub fn edit( if let Some(hostname) = hostname { // We replace the addresses. 
         println!("Updating hostname to {}...", style(&hostname.hostname).cyan().bold());
-        info.api = Address::Hostname(format!("http://{}", hostname.hostname), info.api.port());
-        info.drv = Address::Hostname(format!("grpc://{}", hostname.hostname), info.drv.port());
+        info.api = Address::hostname(hostname.hostname.clone(), info.api.port);
+        info.drv = Address::hostname(hostname.hostname.clone(), info.drv.port);
     }
     if let Some(port) = api_port {
         println!("Updating API service port to {}...", style(port).cyan().bold());
-        info.api = Address::Hostname(info.api.domain().into(), port);
+        info.api = Address::hostname(info.api.domain(), port);
     }
     if let Some(port) = drv_port {
         println!("Updating driver service port to {}...", style(port).cyan().bold());
-        info.drv = Address::Hostname(info.drv.domain().into(), port);
+        info.drv = Address::hostname(info.drv.domain(), port);
     }
     if let Some(user) = user {
         println!("Updating username to {}...", style(&user).cyan().bold());
diff --git a/brane-cli/src/planner.rs b/brane-cli/src/planner.rs
index 2cf2bac0..bb9f9734 100644
--- a/brane-cli/src/planner.rs
+++ b/brane-cli/src/planner.rs
@@ -4,7 +4,7 @@
 // Created:
 //   24 Oct 2022, 16:40:21
 // Last edited:
-//   31 Jan 2024, 14:47:01
+//   14 Nov 2024, 17:58:30
 // Auto updated?
 //   Yes
 //
@@ -18,13 +18,12 @@ use std::mem;
 use std::path::PathBuf;
 use std::sync::Arc;
 
-use brane_ast::Workflow;
-use brane_ast::ast::{Edge, SymTable};
 use brane_tsk::errors::PlanError;
 use brane_tsk::spec::{LOCALHOST, Planner};
 use log::debug;
 use parking_lot::Mutex;
 use specifications::data::{AccessKind, AvailabilityKind, DataIndex, DataName};
+use specifications::wir::{Edge, SymTable, Workflow};
 
 
 /***** HELPER FUNCTIONS *****/
@@ -417,7 +416,7 @@ impl OfflinePlanner {
 
 #[async_trait::async_trait]
 impl Planner for OfflinePlanner {
-    async fn plan(&self, workflow: brane_ast::Workflow) -> Result<Workflow, PlanError> {
+    async fn plan(&self, workflow: specifications::wir::Workflow) -> Result<Workflow, PlanError> {
         let mut workflow = workflow;
 
         // Get the symbol table muteable, so we can... mutate... it
diff --git a/brane-cli/src/registry.rs b/brane-cli/src/registry.rs
index bae97ae9..3ca907ca 100644
--- a/brane-cli/src/registry.rs
+++ b/brane-cli/src/registry.rs
@@ -40,7 +40,7 @@ type DateTimeUtc = DateTime<Utc>;
 /// This function may error if we could not find, read or parse the config file with the login data. If not found, this likely indicates the user hasn't logged-in yet.
 #[inline]
 pub fn get_graphql_endpoint() -> Result<String, RegistryError> {
-    Ok(format!("{}/graphql", InstanceInfo::from_active_path().map_err(|source| RegistryError::InstanceInfoError { source })?.api))
+    Ok(format!("http://{}/graphql", InstanceInfo::from_active_path().map_err(|source| RegistryError::InstanceInfoError { source })?.api))
 }
 
 /// Get the package endpoint of the Brane API.
@@ -52,7 +52,7 @@ pub fn get_graphql_endpoint() -> Result<String, RegistryError> {
 /// This function may error if we could not find, read or parse the config file with the login data. If not found, this likely indicates the user hasn't logged-in yet.
 #[inline]
 pub fn get_packages_endpoint() -> Result<String, RegistryError> {
-    Ok(format!("{}/packages", InstanceInfo::from_active_path().map_err(|source| RegistryError::InstanceInfoError { source })?.api))
+    Ok(format!("http://{}/packages", InstanceInfo::from_active_path().map_err(|source| RegistryError::InstanceInfoError { source })?.api))
 }
 
 /// Get the data endpoint of the Brane API.
@@ -64,7 +64,7 @@ pub fn get_packages_endpoint() -> Result<String, RegistryError> {
 /// This function may error if we could not find, read or parse the config file with the login data. If not found, this likely indicates the user hasn't logged-in yet.
 #[inline]
 pub fn get_data_endpoint() -> Result<String, RegistryError> {
-    Ok(format!("{}/data", InstanceInfo::from_active_path().map_err(|source| RegistryError::InstanceInfoError { source })?.api))
+    Ok(format!("http://{}/data", InstanceInfo::from_active_path().map_err(|source| RegistryError::InstanceInfoError { source })?.api))
 }
 
 
diff --git a/brane-cli/src/repl.rs b/brane-cli/src/repl.rs
index 84bbc16a..55b47bcc 100644
--- a/brane-cli/src/repl.rs
+++ b/brane-cli/src/repl.rs
@@ -4,7 +4,7 @@
 // Created:
 //   12 Sep 2022, 16:42:47
 // Last edited:
-//   08 Jan 2024, 10:23:14
+//   02 May 2025, 14:38:28
 // Auto updated?
 //   Yes
 //
@@ -15,13 +15,16 @@
 use std::borrow::Cow::{self, Borrowed, Owned};
 use std::fs;
 use std::io::{Stderr, Stdout};
+use std::sync::Arc;
 
-use brane_ast::ast::Snippet;
-use brane_ast::{ParserOptions, Workflow};
+use brane_ast::errors::CompileError;
+use brane_ast::state::CompileState;
+use brane_ast::{CompileResult, ParserOptions};
 use brane_dsl::Language;
 use brane_exe::FullValue;
 use brane_tsk::docker::DockerOptions;
 use brane_tsk::spec::AppId;
+use error_trace::ErrorTrace as _;
 use log::warn;
 use rustyline::completion::{Completer, FilenameCompleter, Pair};
 use rustyline::error::ReadlineError;
@@ -31,6 +34,10 @@ use rustyline::history::DefaultHistory;
 use rustyline::validate::{self, MatchingBracketValidator, Validator};
 use rustyline::{CompletionType, Config, Context, EditMode, Editor};
 use rustyline_derive::Helper;
+use serde::{Deserialize, Serialize};
+use specifications::data::DataIndex;
+use specifications::package::PackageIndex;
+use specifications::wir::Workflow;
 
 pub use crate::errors::ReplError as Error;
 use crate::instance::InstanceInfo;
@@ -74,6 +81,138 @@ fn repl_magicks(line: impl AsRef) -> Option {
     }
 }
 
+/// Compiles the given workflow string to a Workflow.
+///
+/// # Arguments
+/// - `state`: The CompileState to compile with (and to update).
+/// - `source`: The collected source string for now. This will be updated with the new snippet.
+/// - `pindex`: The PackageIndex to resolve package imports with.
+/// - `dindex`: The DataIndex to resolve data instantiations with.
+/// - `user`: If given, then this is some tentative identifier of the user receiving the final workflow result.
+/// - `options`: The ParserOptions to use.
+/// - `what`: A string describing what we're parsing (e.g., a filename, stdin, ...).
+/// - `snippet`: The actual snippet to parse.
+///
+/// # Returns
+/// A new Workflow that is the compiled and executable version of the given snippet.
+///
+/// # Errors
+/// This function errors if the given string was not a valid workflow. If that's the case, it's also pretty-printed to stdout with source context.
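+///
+/// # Example (illustrative; assumes a prepared `state`, `source`, `pindex` and `dindex`, and a hypothetical user "amy")
+/// ```ignore
+/// let workflow: Workflow =
+///     from_source(&mut state, &mut source, &pindex, &dindex, Some("amy"), &ParserOptions::bscript(), "<stdin>", "println(42);")?;
+/// ```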
+#[allow(clippy::too_many_arguments)]
+pub fn from_source(
+    state: &mut CompileState,
+    source: &mut String,
+    pindex: &PackageIndex,
+    dindex: &DataIndex,
+    user: Option<&str>,
+    options: &ParserOptions,
+    what: impl AsRef<str>,
+    snippet: impl AsRef<str>,
+) -> Result<Workflow, CompileError> {
+    let what: &str = what.as_ref();
+    let snippet: &str = snippet.as_ref();
+
+    // Append the source with the snippet
+    source.push_str(snippet);
+    source.push('\n');
+
+    // Compile the snippet, possibly fetching new ones while at it
+    let workflow: Workflow = match brane_ast::compile_snippet(state, snippet.as_bytes(), pindex, dindex, options) {
+        CompileResult::Workflow(mut wf, warns) => {
+            // Print any warnings to stdout
+            for w in warns {
+                w.prettyprint(what, &source);
+            }
+
+            // Then, inject the username if any
+            if let Some(user) = user {
+                debug!("Setting user '{user}' as receiver of final result");
+                wf.user = Arc::new(Some(user.into()));
+            }
+
+            // Done
+            wf
+        },
+
+        CompileResult::Eof(err) => {
+            // Prettyprint it
+            err.prettyprint(what, &source);
+            state.offset += 1 + snippet.chars().filter(|c| *c == '\n').count();
+            return Err(CompileError::AstError { what: what.into(), errs: vec![err] });
+        },
+        CompileResult::Err(errs) => {
+            // Prettyprint them
+            for e in &errs {
+                e.prettyprint(what, &source);
+            }
+            state.offset += 1 + snippet.chars().filter(|c| *c == '\n').count();
+            return Err(CompileError::AstError { what: what.into(), errs });
+        },
+
+        // Any others should not occur
+        _ => {
+            unreachable!();
+        },
+    };
+    debug!("Compiled to workflow:\n\n");
+    if log::max_level() == log::LevelFilter::Debug {
+        brane_ast::traversals::print::ast::do_traversal(&workflow, std::io::stdout()).unwrap();
+    }
+
+    // Return
+    Ok(workflow)
+}
+
+
+
+
+
+/***** SNIPPET *****/
+/// Snippets are parsed sections of workflow that keep track of the number of parsed lines.
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct Snippet {
+    pub lines:    usize,
+    pub workflow: Workflow,
+}
+
+impl Snippet {
+    /// Compiles the given workflow string to a Snippet.
+    ///
+    /// # Arguments
+    /// - `state`: The CompileState to compile with (and to update).
+    /// - `source`: The collected source string for now. This will be updated with the new snippet.
+    /// - `pindex`: The PackageIndex to resolve package imports with.
+    /// - `dindex`: The DataIndex to resolve data instantiations with.
+    /// - `user`: If given, then this is some tentative identifier of the user receiving the final workflow result.
+    /// - `options`: The ParserOptions to use.
+    /// - `what`: A string describing what we're parsing (e.g., a filename, stdin, ...).
+    /// - `snippet`: The actual snippet to parse.
+    ///
+    /// # Returns
+    /// A new Snippet wrapping the compiled and executable version of the given snippet.
+    ///
+    /// # Errors
+    /// This function errors if the given string was not a valid workflow. If that's the case, it's also pretty-printed to stdout with source context.
+ #[allow(clippy::too_many_arguments)] + pub fn from_source( + state: &mut CompileState, + source: &mut String, + pindex: &PackageIndex, + dindex: &DataIndex, + user: Option<&str>, + options: &ParserOptions, + what: impl AsRef, + snippet: impl AsRef, + ) -> Result { + let snippet = snippet.as_ref(); + + Ok(Self { + lines: 1 + snippet.chars().filter(|c| *c == '\n').count(), + workflow: from_source(state, source, pindex, dindex, user, options, what, snippet)?, + }) + } +} + @@ -274,17 +413,8 @@ async fn remote_repl( let workflow = { let pindex = state.pindex.lock(); let dindex = state.dindex.lock(); - Workflow::from_source( - &mut state.state, - &mut state.source, - &pindex, - &dindex, - state.user.as_deref(), - &state.options, - "", - line, - ) - .map_err(|source| Error::RunError { what: "repl", source: run::Error::CompileError(source) })? + from_source(&mut state.state, &mut state.source, &pindex, &dindex, state.user.as_deref(), &state.options, "", line) + .map_err(|source| Error::RunError { what: "repl", source: run::Error::CompileError(source) })? }; let snippet = Snippet { lines: line_count, workflow }; @@ -296,7 +426,7 @@ async fn remote_repl( // Then, we collect and process the result if let Err(source) = process_instance_result(&api_address, &proxy_addr, use_case.clone(), snippet.workflow, res).await { - error!("{}", Error::ProcessError { what: "remote instance VM", source }); + error!("{}", Error::ProcessError { what: "remote instance VM", source }.trace()); continue; } @@ -373,17 +503,9 @@ async fn local_repl( // Compile the workflow let line_count = line.chars().filter(|&c| c == '\n').count(); - let workflow = Workflow::from_source( - &mut state.state, - &mut state.source, - &state.pindex, - &state.dindex, - None, - &state.options, - "", - line.clone(), - ) - .map_err(|source| Error::RunError { what: "local repl", source: run::Error::CompileError(source) })?; + let workflow = + from_source(&mut state.state, &mut state.source, &state.pindex, &state.dindex, None, &state.options, "", line.clone()) + .map_err(|source| Error::RunError { what: "local repl", source: run::Error::CompileError(source) })?; let snippet = Snippet { lines: line_count, workflow }; @@ -392,7 +514,7 @@ async fn local_repl( // Then, we collect and process the result if let Err(source) = process_offline_result(res) { - error!("{}", Error::ProcessError { what: "offline VM", source }); + error!("{}", Error::ProcessError { what: "offline VM", source }.trace()); continue; } diff --git a/brane-cli/src/run.rs b/brane-cli/src/run.rs index 5f3e8853..6de54fac 100644 --- a/brane-cli/src/run.rs +++ b/brane-cli/src/run.rs @@ -4,7 +4,7 @@ // Created: // 12 Sep 2022, 16:42:57 // Last edited: -// 07 Mar 2024, 14:14:56 +// 05 May 2025, 11:15:39 // Auto updated? 
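The switch from printing the bare error to `.trace()` in the two REPL loops above is what pulls in the new `error_trace::ErrorTrace` import: `trace()` renders the error together with its `source()` chain. A minimal self-contained illustration (the `Wrapper` type is invented for the example; `ErrorTrace` is assumed to be blanket-implemented for any `std::error::Error`, which is how the calls above use it):

```rust
use std::fmt;

use error_trace::ErrorTrace as _;

#[derive(Debug)]
struct Wrapper { source: std::io::Error }
impl fmt::Display for Wrapper {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "failed to process result") }
}
impl std::error::Error for Wrapper {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { Some(&self.source) }
}

fn main() {
    let err = Wrapper { source: std::io::Error::new(std::io::ErrorKind::Other, "connection reset") };
    // Prints the error plus every error in its `source()` chain, instead of
    // only the top-level message that plain `{}` would show.
    eprintln!("{}", err.trace());
}
```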
// Yes // @@ -19,9 +19,8 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Arc; -use brane_ast::ast::Snippet; +use brane_ast::ParserOptions; use brane_ast::state::CompileState; -use brane_ast::{ParserOptions, Workflow}; use brane_dsl::Language; use brane_exe::FullValue; use brane_exe::dummy::{DummyVm, Error as DummyVmError}; @@ -33,6 +32,7 @@ use parking_lot::{Mutex, MutexGuard}; use specifications::data::{AccessKind, DataIndex, DataInfo}; use specifications::driving::{CreateSessionRequest, DriverServiceClient, ExecuteRequest}; use specifications::package::PackageIndex; +use specifications::wir::Workflow; use tempfile::{TempDir, tempdir}; use tonic::Code; @@ -40,6 +40,7 @@ use crate::data; use crate::errors::OfflineVmError; pub use crate::errors::RunError as Error; use crate::instance::InstanceInfo; +use crate::repl::{Snippet, from_source}; use crate::utils::{ensure_datasets_dir, ensure_packages_dir, get_datasets_dir, get_packages_dir}; use crate::vm::OfflineVm; @@ -79,7 +80,7 @@ pub async fn initialize_instance( // Connect to the server with gRPC debug!("Connecting to driver '{}'...", drv_endpoint); - let mut client = DriverServiceClient::connect(drv_endpoint.to_string()) + let mut client = DriverServiceClient::connect(format!("grpc://{drv_endpoint}")) .await .map_err(|source| Error::ClientConnectError { address: drv_endpoint.into(), source })?; @@ -127,7 +128,7 @@ pub async fn initialize_instance( /// - `profile`: If given, prints the profile timings to stdout if reported by the remote. /// /// # Returns -/// A [`FullValue`] carrying the result of the snippet (or [`FullValue::Void`]). +/// A [`FullValue`] carrying the result of the snippet (or [`FullValue::Void`]), and a [`ProgramCounter`] in case a [`FullValue::Data`] is returned telling us which edge downloaded it. /// /// # Errors /// This function may error if anything in the whole shebang crashed. This can be things client-side, but also remote-side. @@ -218,7 +219,7 @@ pub async fn run_instance( /// - `proxy_addr`: If given, proxies all data transfers through the proxy at the given location. /// - `certs_dir`: The directory where certificates are stored. Expected to contain nested directories that store the certs by domain ID. /// - `datasets_dir`: The directory where we will download the data to. It will be added under a new folder with its own name. -/// - `result`: The value to process. +/// - `result`: The value to process, together with a program counter if it was the result of a toplevel return. /// /// # Returns /// Nothing, but does print any result to stdout. It may also download a remote dataset if one is given. 
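Several hunks here and below prefix bare `host:port` endpoints with an explicit scheme at the call site, now that addresses in the node config no longer embed one (see the `Address::hostname` changes in brane-ctl further down). A sketch of the convention (the helper is hypothetical; the patch simply inlines the `format!` calls):

```rust
/// Prefix a bare `host:port` endpoint with an explicit scheme, leaving
/// endpoints that already carry one untouched.
fn with_scheme(scheme: &str, endpoint: &str) -> String {
    if endpoint.contains("://") { endpoint.to_string() } else { format!("{scheme}://{endpoint}") }
}

fn main() {
    assert_eq!(with_scheme("grpc", "brane-drv:50053"), "grpc://brane-drv:50053");
    assert_eq!(with_scheme("http", "brane-api:50051"), "http://brane-api:50051");
    assert_eq!(with_scheme("http", "https://example.com:443"), "https://example.com:443");
}
```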
@@ -239,8 +240,8 @@ pub async fn process_instance( let datasets_dir: &Path = datasets_dir.as_ref(); // We only print - if result != FullValue::Void { - println!("\nWorkflow returned value {}", style(format!("'{result}'")).bold().cyan()); + if !matches!(result, FullValue::Void) { + println!("\nWorkflow returned value {}", style(format!("'{}'", result)).bold().cyan()); // FIXME: Clean up this blob // Treat some values special @@ -256,7 +257,7 @@ let data_dir: PathBuf = datasets_dir.join(name.to_string()); // Fetch a new, local DataIndex to get up-to-date entries - let data_addr: String = format!("{api_endpoint}/data/info"); + let data_addr: String = format!("http://{api_endpoint}/data/info"); let index: DataIndex = brane_tsk::api::get_data_index(&data_addr).await.map_err(|source| Error::RemoteDataIndexError { address: data_addr, source })?; @@ -496,14 +497,14 @@ pub async fn initialize_instance_vm( // We fetch a local copy of the indices for compiling debug!("Fetching global package & data indices from '{}'...", api_endpoint); - let package_addr: String = format!("{api_endpoint}/graphql"); + let package_addr: String = format!("http://{api_endpoint}/graphql"); let pindex: Arc<Mutex<PackageIndex>> = match brane_tsk::api::get_package_index(&package_addr).await { Ok(pindex) => Arc::new(Mutex::new(pindex)), Err(source) => { return Err(Error::RemotePackageIndexError { address: package_addr, source }); }, }; - let data_addr: String = format!("{api_endpoint}/data/info"); + let data_addr: String = format!("http://{api_endpoint}/data/info"); let dindex: Arc<Mutex<DataIndex>> = match brane_tsk::api::get_data_index(&data_addr).await { Ok(dindex) => Arc::new(Mutex::new(dindex)), Err(source) => { @@ -534,9 +535,8 @@ pub async fn run_dummy_vm(state: &mut DummyVmState, what: impl AsRef<str>, snipp let snippet: &str = snippet.as_ref(); // Compile the workflow - let workflow: Workflow = - Workflow::from_source(&mut state.state, &mut state.source, &state.pindex, &state.dindex, None, &state.options, what, snippet) - .map_err(Error::CompileError)?; + let workflow: Workflow = from_source(&mut state.state, &mut state.source, &state.pindex, &state.dindex, None, &state.options, what, snippet) + .map_err(Error::CompileError)?; // Run it in the local VM (which is a bit ugly due to the need to consume the VM itself) let res: (DummyVm, Result<FullValue, DummyVmError>) = state.vm.take().unwrap().exec(workflow).await; diff --git a/brane-cli/src/test.rs b/brane-cli/src/test.rs index 5a992429..e5af7d5b 100644 --- a/brane-cli/src/test.rs +++ b/brane-cli/src/test.rs @@ -4,7 +4,7 @@ // Created: // 21 Sep 2022, 16:23:37 // Last edited: -// 25 May 2023, 20:12:59 +// 29 Apr 2025, 11:32:04 // Auto updated? // Yes // @@ -16,7 +16,6 @@ use std::fs; use std::path::PathBuf; use brane_ast::ParserOptions; -use brane_ast::ast::Snippet; use brane_exe::FullValue; use brane_tsk::docker::DockerOptions; use brane_tsk::input::prompt_for_input; @@ -26,6 +25,7 @@ use specifications::package::PackageInfo; use specifications::version::Version; use crate::errors::TestError; +use crate::repl::Snippet; use crate::run::{self, OfflineVmState, initialize_offline_vm, run_offline_vm}; use crate::utils::{ensure_datasets_dir, ensure_package_dir}; diff --git a/brane-cli/src/vm.rs b/brane-cli/src/vm.rs index 00c6da46..ed636084 100644 --- a/brane-cli/src/vm.rs +++ b/brane-cli/src/vm.rs @@ -4,7 +4,7 @@ // Created: // 24 Oct 2022, 15:34:05 // Last edited: -// 31 Jan 2024, 14:23:06 +// 02 May 2025, 15:15:31 // Auto updated?
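The `!=` to `!matches!` change above means the check no longer needs a `PartialEq` impl on `FullValue` (a plausible motivation, not stated by the patch): `matches!` only tests the pattern. Illustrated with an invented enum:

```rust
enum Value { Void, Int(i64) }

fn should_print(v: &Value) -> bool {
    // Pattern match instead of equality: works even though `Value` does not
    // derive `PartialEq`.
    !matches!(v, Value::Void)
}

fn main() {
    assert!(should_print(&Value::Int(5)));
    assert!(!should_print(&Value::Void));
}
```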
// Yes // @@ -20,11 +20,8 @@ use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; use base64::Engine as _; use base64::engine::general_purpose::STANDARD; -use brane_ast::Workflow; -use brane_ast::locations::Location; use brane_exe::Vm; use brane_exe::errors::VmError; -use brane_exe::pc::ProgramCounter; use brane_exe::spec::{RunState, TaskInfo, VmPlugin}; use brane_exe::value::FullValue; use brane_shr::formatters::BlockFormatter; @@ -39,7 +36,10 @@ use parking_lot::Mutex; use specifications::container::{Image, VolumeBind}; use specifications::data::{AccessKind, DataIndex, DataInfo, DataName, PreprocessKind}; use specifications::package::{PackageIndex, PackageInfo}; +use specifications::pc::ProgramCounter; use specifications::profiling::ProfileScopeHandle; +use specifications::wir::Workflow; +use specifications::wir::locations::Location; use tokio::fs as tfs; use tokio::io::AsyncWriteExt; diff --git a/brane-ctl/Cargo.toml b/brane-ctl/Cargo.toml index f1a46146..bfac7c46 100644 --- a/brane-ctl/Cargo.toml +++ b/brane-ctl/Cargo.toml @@ -13,16 +13,17 @@ name = "branectl" path = "src/main.rs" [dependencies] +base16ct = { version = "0.2.0", features = ["std"] } base64ct = "1.6.0" bollard = "0.18.0" +chrono = "0.4.39" clap = { version = "4.5.6", features = ["derive","env"] } console = "0.15.5" dialoguer = "0.11.0" -diesel = { version = "2.2.3", features = ["sqlite"] } diesel_migrations = "2.2.0" dirs = "6.0.0" dotenvy = "0.15.0" -eflint-to-json = { git = "https://github.com/braneframework/policy-reasoner" } +# eflint-to-json = { git = "https://github.com/braneframework/policy-reasoner" } enum-debug.workspace = true error-trace.workspace = true humanlog.workspace = true @@ -32,11 +33,11 @@ jsonwebtoken = "9.2.0" lazy_static = "1.4.0" log = "0.4.22" names.workspace = true -policy = { git = "https://github.com/braneframework/policy-reasoner" } -srv = { git = "https://github.com/braneframework/policy-reasoner" } +# policy = { git = "https://github.com/braneframework/policy-reasoner" } +# srv = { git = "https://github.com/braneframework/policy-reasoner" } rand = "0.9.0" serde = { version = "1.0.204", features = ["derive"] } -serde_json = "1.0.120" +serde_json = { version = "1.0.120", features = ["raw_value"] } serde_yaml = { version = "0.0.10", package = "serde_yml" } shlex = "1.1.0" tempfile = "3.10.1" @@ -47,6 +48,10 @@ tokio = { version = "1.38.0", features = [] } # Note that this crate needs a root store because it can download from the internet reqwest = { workspace = true, features = ["rustls-tls-webpki-roots"] } +# eflint-json = { git = "https://gitlab.com/eflint/json-spec-rs", features = ["v0_1_0_srv"] } +# eflint-to-json = { git = "https://gitlab.com/eflint/eflint-to-json-rs", features = ["async-tokio"] } +policy-store = { git = "https://github.com/BraneFramework/policy-store", features = ["sqlite-database", "sqlite-database-embedded-migrations"] } + brane-cfg = { path = "../brane-cfg" } brane-shr = { path = "../brane-shr" } brane-tsk = { path = "../brane-tsk" } diff --git a/brane-ctl/src/cli.rs b/brane-ctl/src/cli.rs index f9e5fc6f..74beb7bc 100644 --- a/brane-ctl/src/cli.rs +++ b/brane-ctl/src/cli.rs @@ -346,14 +346,6 @@ pub(crate) enum GenerateSubcommand { /// The path to write to. #[clap(short, long, default_value = "./policies.db", help = "The path to write the policy database file to.")] path: PathBuf, - /// The branch to pull the migrations from. 
- #[clap( - short, - long, - default_value = "main", - help = "The branch of the `https://github.com/braneframework/policy-reasoner` repository from which to pull the Diesel migrations." - )] - branch: String, }, #[clap(name = "policy_secret", about = "Generates a new JWT key for use in the `brane-chk` service.")] @@ -558,7 +550,7 @@ pub(crate) enum PolicySubcommand { help = "The version of the policy to activate. Omit to have branectl download the version metadata from the checker and let you choose \ interactively." )] - version: Option<i64>, + version: Option<u64>, /// Address on which to find the checker. #[clap( @@ -595,8 +587,7 @@ pub(crate) enum PolicySubcommand { #[clap( short, long, - help = "The language of the input policy. Options are 'eflint' and 'eflint-json', where the former will be compiled to the latter \ before sending. If omitted, will attempt to deduce it based on the 'INPUT'." + help = "The language of the input policy. Options are 'eflint'. If omitted, will attempt to deduce it based on the 'INPUT'." )] language: Option<PolicyInputLanguage>, diff --git a/brane-ctl/src/errors.rs b/brane-ctl/src/errors.rs index b6fb207c..f3409a04 100644 --- a/brane-ctl/src/errors.rs +++ b/brane-ctl/src/errors.rs @@ -4,7 +4,7 @@ // Created: // 21 Nov 2022, 15:46:26 // Last edited: -// 26 Jun 2024, 16:44:55 +// 01 May 2025, 10:43:15 // Auto updated? // Yes // @@ -186,7 +186,7 @@ pub enum GenerateError { MigrationsRetrieve { path: PathBuf, source: diesel_migrations::MigrationError }, /// Failed to connect to the database file. #[error("Failed to connect to SQLite database file '{}'", path.display())] - DatabaseConnect { path: PathBuf, source: diesel::ConnectionError }, + DatabaseCreate { path: PathBuf, source: policy_store::databases::sqlite::DatabaseError }, /// Failed to apply a set of migrations. #[error("Failed to apply migrations to SQLite database file '{}'", path.display())] MigrationsApply { path: PathBuf, source: Box }, @@ -277,6 +277,10 @@ pub enum LifetimeError { /// The given job failed. #[error("Command '{}' failed with exit code {} (see output above)", style(format!("{command:?}")).bold(), style(status.code().map(|c| c.to_string()).unwrap_or_else(|| "non-zero".into())).bold())] JobFailure { command: Command, status: ExitStatus }, + + /// Failed to generate a new JWT given the given key. + #[error("Failed to generate a JWT with the given key {key:?}")] + TokenGenerate { key: PathBuf, err: specifications::policy::Error }, } /// Errors that relate to package subcommands. @@ -372,7 +376,7 @@ pub enum PairParseError { #[derive(Debug, thiserror::Error)] pub enum PolicyInputLanguageParseError { /// The given identifier was not recognized. - #[error("Unknown policy input language '{raw}' (options are 'eflint' or 'eflint-json')")] + #[error("Unknown policy input language '{raw}' (options are 'eflint')")] Unknown { raw: String }, } diff --git a/brane-ctl/src/generate.rs b/brane-ctl/src/generate.rs index e32d6eb8..7da647ea 100644 --- a/brane-ctl/src/generate.rs +++ b/brane-ctl/src/generate.rs @@ -4,7 +4,7 @@ // Created: // 21 Nov 2022, 15:40:47 // Last edited: -// 01 May 2024, 15:20:56 +// 29 Apr 2025, 14:00:41 // Auto updated?
// Yes // @@ -27,25 +27,23 @@ use brane_cfg::backend::{BackendFile, Credentials}; use brane_cfg::info::Info as _; use brane_cfg::infra::{InfraFile, InfraLocation}; use brane_cfg::node::{ - self, CentralConfig, CentralPaths, CentralServices, ExternalService, NodeConfig, NodeSpecificConfig, PrivateOrExternalService, PrivateService, - ProxyPaths, ProxyServices, PublicService, WorkerConfig, WorkerPaths, WorkerServices, WorkerUsecase, + self, CentralConfig, CentralPaths, CentralServices, DoublePrivateService, ExternalService, NodeConfig, NodeSpecificConfig, + PrivateOrExternalService, PrivateService, ProxyPaths, ProxyServices, PublicService, WorkerConfig, WorkerPaths, WorkerServices, WorkerUsecase, }; use brane_cfg::proxy::{self, ForwardConfig}; -use brane_shr::fs::{DownloadSecurity, set_executable}; +use brane_shr::fs::set_executable; use console::style; -use diesel::{Connection as _, SqliteConnection}; -use diesel_migrations::{FileBasedMigrations, MigrationHarness as _}; use enum_debug::EnumDebug as _; use jsonwebtoken::jwk::{self, Jwk, JwkSet, KeyAlgorithm, OctetKeyParameters, OctetKeyType, PublicKeyUse}; use log::{debug, info, warn}; +use policy_store::databases::sqlite::SQLiteDatabase; use rand::distr::Alphanumeric; use rand::rngs::OsRng; use rand::{Rng as _, TryRngCore}; use serde::Serialize; -use specifications::address::Address; +use specifications::address::{Address, Host}; use specifications::package::Capability; use specifications::policy::generate_policy_token; -use tempfile::TempDir; pub use crate::errors::GenerateError as Error; use crate::spec::{GenerateBackendSubcommand, GenerateCertsSubcommand, GenerateNodeSubcommand, Pair}; @@ -675,21 +673,21 @@ pub fn node( api: PublicService { name: api_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), api_port).into(), - address: Address::Hostname(format!("http://{api_name}"), api_port), + address: Address::hostname(format!("{api_name}"), api_port), - external_address: Address::Hostname(format!("http://{hostname}"), api_port), + external_address: Address::hostname(format!("{hostname}"), api_port), }, drv: PublicService { name: drv_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), drv_port).into(), - address: Address::Hostname(format!("grpc://{drv_name}"), drv_port), + address: Address::hostname(format!("{drv_name}"), drv_port), - external_address: Address::Hostname(format!("grpc://{hostname}"), drv_port), + external_address: Address::hostname(format!("{hostname}"), drv_port), }, plr: PrivateService { name: plr_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), plr_port).into(), - address: Address::Hostname(format!("http://{plr_name}"), plr_port), + address: Address::hostname(format!("{plr_name}"), plr_port), }, prx: if let Some(address) = external_proxy { PrivateOrExternalService::External(ExternalService { address }) @@ -697,14 +695,14 @@ pub fn node( PrivateOrExternalService::Private(PrivateService { name: prx_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), prx_port).into(), - address: Address::Hostname(format!("http://{prx_name}"), prx_port), + address: Address::hostname(format!("{prx_name}"), prx_port), }) }, aux_scylla: PrivateService { name: "aux-scylla".into(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), 9042).into(), - address: Address::Hostname("aux-scylla".into(), 9042), + address: Address::hostname("aux-scylla", 9042), }, }, }), @@ -718,8 +716,8 @@ pub fn node( use_cases, backend, policy_database, - policy_deliberation_secret, - policy_expert_secret, + 
policy_delib_secret, + policy_store_secret, policy_audit_log, proxy, certs, @@ -736,7 +734,8 @@ pub fn node( prx_port, reg_port, job_port, - chk_port, + chk_delib_port, + chk_store_port, } => { // Remove any scheme, paths, ports, whatever from the hostname let mut hostname: &str = &hostname; @@ -754,8 +753,8 @@ pub fn node( // Resolve any path depending on the '$CONFIG' let backend: PathBuf = resolve_config_path(backend, &config_path); - let policy_deliberation_secret: PathBuf = resolve_config_path(policy_deliberation_secret, &config_path); - let policy_expert_secret: PathBuf = resolve_config_path(policy_expert_secret, &config_path); + let policy_delib_secret: PathBuf = resolve_config_path(policy_delib_secret, &config_path); + let policy_store_secret: PathBuf = resolve_config_path(policy_store_secret, &config_path); let policy_audit_log: Option = policy_audit_log.map(|p| resolve_config_path(p, &config_path)); let proxy: PathBuf = resolve_config_path(proxy, &config_path); let certs: PathBuf = resolve_config_path(certs, &config_path); @@ -763,8 +762,8 @@ pub fn node( // Ensure the directory structure is there ensure_dir_of(&backend, fix_dirs)?; ensure_dir_of(&policy_database, fix_dirs)?; - ensure_dir_of(&policy_deliberation_secret, fix_dirs)?; - ensure_dir_of(&policy_expert_secret, fix_dirs)?; + ensure_dir_of(&policy_delib_secret, fix_dirs)?; + ensure_dir_of(&policy_store_secret, fix_dirs)?; if let Some(policy_audit_log) = &policy_audit_log { ensure_dir_of(policy_audit_log, fix_dirs)?; } @@ -792,8 +791,8 @@ pub fn node( backend: canonicalize(backend)?, policy_database: canonicalize(policy_database)?, - policy_deliberation_secret: canonicalize(policy_deliberation_secret)?, - policy_expert_secret: canonicalize(policy_expert_secret)?, + policy_delib_secret: canonicalize(policy_delib_secret)?, + policy_store_secret: canonicalize(policy_store_secret)?, policy_audit_log: policy_audit_log.map(canonicalize).transpose()?, proxy: if external_proxy.is_some() { None } else { Some(canonicalize(proxy)?) 
}, @@ -807,21 +806,22 @@ pub fn node( reg: PublicService { name: reg_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), reg_port).into(), - address: Address::Hostname(format!("https://{reg_name}"), reg_port), + address: Address::hostname(format!("{reg_name}"), reg_port), - external_address: Address::Hostname(format!("https://{hostname}"), reg_port), + external_address: Address::hostname(format!("{hostname}"), reg_port), }, job: PublicService { name: job_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), job_port).into(), - address: Address::Hostname(format!("grpc://{job_name}"), job_port), + address: Address::hostname(format!("{job_name}"), job_port), - external_address: Address::Hostname(format!("grpc://{hostname}"), job_port), + external_address: Address::hostname(format!("{hostname}"), job_port), }, - chk: PrivateService { - name: chk_name.clone(), - bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), chk_port).into(), - address: Address::Hostname(format!("http://{chk_name}"), chk_port), + chk: DoublePrivateService { + name: chk_name.clone(), + host: Host::new_name(chk_name), + delib: chk_delib_port, + store: chk_store_port, }, prx: if let Some(address) = external_proxy { PrivateOrExternalService::External(ExternalService { address }) @@ -829,7 +829,7 @@ pub fn node( PrivateOrExternalService::Private(PrivateService { name: prx_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), prx_port).into(), - address: Address::Hostname(format!("http://{prx_name}"), prx_port), + address: Address::hostname(format!("{prx_name}"), prx_port), }) }, }, @@ -867,9 +867,9 @@ pub fn node( prx: PublicService { name: prx_name.clone(), bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), prx_port).into(), - address: Address::Hostname(format!("http://{prx_name}"), prx_port), + address: Address::hostname(format!("{prx_name}"), prx_port), - external_address: Address::Hostname(format!("http://{hostname}"), prx_port), + external_address: Address::hostname(format!("{hostname}"), prx_port), }, }, }), @@ -1140,8 +1140,8 @@ pub fn infra( for loc in locations { locs.insert(loc.0.clone(), InfraLocation { name: beautify_id(loc.0), - registry: Address::hostname(format!("https://{}", loc.1), 50051), - delegate: Address::hostname(format!("grpc://{}", loc.1), 50052), + registry: Address::hostname(format!("{}", loc.1), 50051), + delegate: Address::hostname(format!("{}", loc.1), 50052), }); } @@ -1156,7 +1156,7 @@ pub fn infra( } for port in reg_ports { match locs.get_mut(&port.0) { - Some(loc) => *loc.registry.port_mut() = port.1, + Some(loc) => loc.registry.port = port.1, None => { return Err(Error::UnknownLocation { loc: port.0 }); }, @@ -1164,7 +1164,7 @@ pub fn infra( } for port in job_ports { match locs.get_mut(&port.0) { - Some(loc) => *loc.delegate.port_mut() = port.1, + Some(loc) => loc.delegate.port = port.1, None => { return Err(Error::UnknownLocation { loc: port.0 }); }, @@ -1259,56 +1259,16 @@ pub fn backend( /// /// # Errors /// This function may error if I/O errors occur while writing the file. 
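The rewritten `policy_database` below replaces the old download-Diesel-migrations-and-apply dance with a single call into `policy-store`, whose migrations are embedded at compile time. A sketch of the new flow (the same calls as the function below; the standalone wrapper is hypothetical):

```rust
use std::path::Path;

use policy_store::databases::sqlite::{DatabaseError, MIGRATIONS, SQLiteDatabase};

/// Opening the database once is enough: it creates the file and applies the
/// embedded migrations, after which the connection is simply dropped.
async fn touch_policy_db(path: &Path) -> Result<(), DatabaseError> {
    SQLiteDatabase::<()>::new_async(path, MIGRATIONS).await?;
    Ok(())
}
```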
-pub async fn policy_database(fix_dirs: bool, path: PathBuf, branch: String) -> Result<(), Error> { - info!("Generating policies.db at '{}'...", path.display()); +pub async fn policy_database(fix_dirs: bool, path: PathBuf) -> Result<(), Error> { + info!("Generating policies.db at {:?}...", path.display()); - // First, touch the file alive - debug!("Creating policy database file '{}'...", path.display()); + // First, ensure the parent exists + debug!("Creating policy database file {:?} parent directory...", path.display()); ensure_dir_of(&path, fix_dirs)?; - File::create(&path).map_err(|source| Error::FileCreateError { what: "policy database", path: path.clone(), source })?; - - // Next, fetch the migrations to run - debug!("Retrieving up-to-date mitigations from 'https://github.com/braneframework/policy-reasoner ({branch})..."); - // NOTE: We're not using `_dir`, but keep it to prevent the directory from being removed once the objects gets dropped - let (_dir, migrations): (TempDir, FileBasedMigrations) = { - // Prepare the input URL and output directory - let url = format!("https://api.github.com/repos/braneframework/policy-reasoner/tarball/{branch}"); - let dir = TempDir::new().map_err(|source| Error::TempDirError { source })?; - - // Download the file - let tar_path: PathBuf = dir.path().join("repo.tar.gz"); - let dir_path: PathBuf = dir.path().join("repo"); - brane_shr::fs::download_file_async(&url, &tar_path, DownloadSecurity { checksum: None, https: true }, None) - .await - .map_err(|source| Error::RepoDownloadError { repo: url, target: dir_path.clone(), source })?; - brane_shr::fs::unarchive_async(&tar_path, &dir_path).await.map_err(|source| Error::RepoUnpackError { - tar: tar_path, - target: dir_path.clone(), - source, - })?; - - // Resolve that one weird folder in there - let dir_path: PathBuf = brane_shr::fs::recurse_in_only_child_async(&dir_path) - .await - .map_err(|source| Error::RepoRecurseError { target: dir_path.clone(), source })?; - - // Read that as the migrations - let migrations: FileBasedMigrations = FileBasedMigrations::find_migrations_directory_in_path(&dir_path) - .map_err(|source| Error::MigrationsRetrieve { path: dir_path.clone(), source })?; - - (dir, migrations) - }; - - // Apply that with diesel - { - // Connect to the database - debug!("Applying migrations..."); - let mut conn: SqliteConnection = - SqliteConnection::establish(&path.display().to_string()).map_err(|source| Error::DatabaseConnect { path: path.clone(), source })?; - - // Attempt to run the migration - conn.run_pending_migrations(migrations).map_err(|source| Error::MigrationsApply { path: path.clone(), source })?; - } + // Now we touch the file alive by making a brief connection + SQLiteDatabase::<()>::new_async(&path, policy_store::databases::sqlite::MIGRATIONS) + .await + .map_err(|source| Error::DatabaseCreate { path: path.clone(), source })?; // Done println!("Successfully generated {}", style(path.display().to_string()).bold().green()); diff --git a/brane-ctl/src/lifetime.rs b/brane-ctl/src/lifetime.rs index a1b4f15b..3ede7108 100644 --- a/brane-ctl/src/lifetime.rs +++ b/brane-ctl/src/lifetime.rs @@ -4,7 +4,7 @@ // Created: // 22 Nov 2022, 11:19:22 // Last edited: -// 07 Mar 2024, 09:55:58 +// 02 May 2025, 11:03:39 // Auto updated? 
// Yes // @@ -22,6 +22,7 @@ use std::net::IpAddr; use std::path::{Path, PathBuf}; use std::process::{Command, Output, Stdio}; use std::str::FromStr as _; +use std::time::Duration; use bollard::Docker; use brane_cfg::info::Info as _; @@ -35,7 +36,7 @@ use console::style; use log::{debug, info}; use rand::Rng; use rand::distr::Alphanumeric; -use serde::{Deserialize, Serialize}; +use serde::Serialize; use specifications::container::Image; use specifications::version::Version; @@ -45,10 +46,8 @@ use crate::spec::{LogsOpts, StartOpts, StartSubcommand}; /***** HELPER STRUCTS *****/ /// Defines a struct that writes to a valid compose file for overriding hostnames. -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Serialize)] struct ComposeOverrideFile { - /// The version number to use - version: &'static str, /// The services themselves services: HashMap<&'static str, ComposeOverrideFileService>, } @@ -56,7 +55,7 @@ struct ComposeOverrideFile { /// Defines a struct that defines how a service looks like in a valid compose file for overriding hostnames. -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Serialize)] struct ComposeOverrideFileService { /// Defines any additional mounts volumes: Vec, @@ -296,8 +295,8 @@ fn prepare_host(node_config: &NodeConfig) -> Result<(), Error> { packages: _, backend: _, policy_database: _, - policy_deliberation_secret: _, - policy_expert_secret: _, + policy_delib_secret: _, + policy_store_secret: _, policy_audit_log, proxy: _, data: _, @@ -377,7 +376,6 @@ fn generate_override_file(node_config: &NodeConfig, hosts: &HashMap token, + Err(err) => return Err(Error::TokenGenerate { key: policy_delib_secret.clone(), err }), + }; + // Add the environment variables, which are basically just central-specific paths to mount in the compose file res.extend([ // Also add the location ID @@ -583,11 +591,13 @@ fn construct_envs(version: &Version, node_config_path: &Path, node_config: &Node ("CHK_NAME", OsString::from(&chk.name.as_str())), ("JOB_NAME", OsString::from(&job.name.as_str())), ("CHK_NAME", OsString::from(&chk.name.as_str())), + // Tokens + ("CHECKER_DELIB_TOKEN", OsString::from(&delib_token.as_str())), // Paths ("BACKEND", canonicalize_join(node_config_dir, backend)?.as_os_str().into()), ("POLICY_DB", canonicalize_join(node_config_dir, policy_database)?.as_os_str().into()), - ("POLICY_DELIBERATION_SECRET", canonicalize_join(node_config_dir, policy_deliberation_secret)?.as_os_str().into()), - ("POLICY_EXPERT_SECRET", canonicalize_join(node_config_dir, policy_expert_secret)?.as_os_str().into()), + ("POLICY_DELIB_KEYS", canonicalize_join(node_config_dir, policy_delib_secret)?.as_os_str().into()), + ("POLICY_STORE_KEYS", canonicalize_join(node_config_dir, policy_store_secret)?.as_os_str().into()), ("CERTS", canonicalize_join(node_config_dir, certs)?.as_os_str().into()), ("PACKAGES", canonicalize_join(node_config_dir, packages)?.as_os_str().into()), ("DATA", canonicalize_join(node_config_dir, data)?.as_os_str().into()), @@ -595,7 +605,8 @@ fn construct_envs(version: &Version, node_config_path: &Path, node_config: &Node ("TEMP_DATA", canonicalize_join(node_config_dir, temp_data)?.as_os_str().into()), ("TEMP_RESULTS", canonicalize_join(node_config_dir, temp_results)?.as_os_str().into()), // Ports - ("CHK_PORT", OsString::from(format!("{}", chk.bind.port()))), + ("CHK_DELIB_PORT", OsString::from(chk.delib.to_string())), + ("CHK_STORE_PORT", OsString::from(chk.store.to_string())), ("REG_PORT", OsString::from(format!("{}", 
reg.bind.port()))), ("JOB_PORT", OsString::from(format!("{}", job.bind.port()))), ]); diff --git a/brane-ctl/src/main.rs b/brane-ctl/src/main.rs index ee47acc9..a1b7f24f 100644 --- a/brane-ctl/src/main.rs +++ b/brane-ctl/src/main.rs @@ -4,7 +4,7 @@ // Created: // 15 Nov 2022, 09:18:40 // Last edited: -// 01 May 2024, 15:20:07 +// 19 Nov 2024, 14:46:33 // Auto updated? // Yes // @@ -98,9 +98,9 @@ async fn main() { } }, - GenerateSubcommand::PolicyDatabase { fix_dirs, path, branch } => { + GenerateSubcommand::PolicyDatabase { fix_dirs, path } => { // Call the thing - if let Err(err) = generate::policy_database(fix_dirs, path, branch).await { + if let Err(err) = generate::policy_database(fix_dirs, path).await { error!("{}", err.trace()); std::process::exit(1); } diff --git a/brane-ctl/src/policies.rs b/brane-ctl/src/policies.rs index f3315911..dc62785f 100644 --- a/brane-ctl/src/policies.rs +++ b/brane-ctl/src/policies.rs @@ -4,7 +4,7 @@ // Created: // 10 Jan 2024, 15:57:54 // Last edited: -// 24 Jun 2024, 17:40:43 +// 01 May 2025, 17:11:24 // Auto updated? // Yes // @@ -12,6 +12,8 @@ //! Implements handlers for subcommands to `branectl policies ...` // +use std::borrow::Cow; +use std::collections::HashMap; use std::error; use std::ffi::OsStr; use std::fmt::{Display, Formatter, Result as FResult}; @@ -21,21 +23,23 @@ use std::time::Duration; use brane_cfg::info::Info; use brane_cfg::node::{NodeConfig, NodeSpecificConfig, WorkerConfig}; use brane_shr::formatters::BlockFormatter; +use chrono::{DateTime, Local}; use console::style; use dialoguer::theme::ColorfulTheme; use enum_debug::EnumDebug; use error_trace::trace; use log::{debug, info}; -use policy::{Policy, PolicyVersion}; +use policy_store::servers::axum::spec::{ActivateRequest, GetActiveVersionResponse, GetVersionsResponse}; +use policy_store::spec::metadata::{AttachedMetadata, Metadata}; use rand::Rng; use rand::distr::Alphanumeric; use reqwest::{Client, Request, Response, StatusCode}; -use serde_json::value::RawValue; +use serde_json::Value; use specifications::address::{Address, AddressOpt}; -use specifications::checking::{ - POLICY_API_ADD_VERSION, POLICY_API_GET_ACTIVE_VERSION, POLICY_API_GET_VERSION, POLICY_API_LIST_POLICIES, POLICY_API_SET_ACTIVE_VERSION, +use specifications::checking::store::{ + ACTIVATE_PATH, ADD_VERSION_PATH, AddVersionRequest, AddVersionResponse, EFlintHaskellReasonerWithInterfaceContext, GET_ACTIVE_VERSION_PATH, + GET_CONTEXT_PATH, GET_VERSION_CONTENT_PATH, GET_VERSIONS_PATH, GetContextResponse, }; -use srv::models::{AddPolicyPostModel, PolicyContentPostModel, SetVersionPostModel}; use tokio::fs::{self as tfs, File as TFile}; use crate::spec::PolicyInputLanguage; @@ -47,12 +51,14 @@ use crate::spec::PolicyInputLanguage; pub enum Error { /// Failed to get the active version of the policy. ActiveVersionGet { addr: Address, err: Box }, + /// Given JSON policy was not a phrases request. + IllegalInput { path: PathBuf, got: String }, /// Failed to deserialize the read input file as JSON. InputDeserialize { path: PathBuf, raw: String, err: serde_json::Error }, /// Failed to read the input file. InputRead { path: PathBuf, err: std::io::Error }, - /// Failed to compile the input file to eFLINT JSON. - InputToJson { path: PathBuf, err: eflint_to_json::Error }, + /// Failed to prompt the user for a string input. + InputString { what: &'static str, err: dialoguer::Error }, /// The wrong policy was activated on the remote checker, somehow. 
InvalidPolicyActivated { addr: Address, got: Option, expected: Option }, /// A policy language was attempted to derive from a path without extension. @@ -86,22 +92,27 @@ pub enum Error { /// The policy was given on stdout but no language was specified. UnspecifiedInputLanguage, /// Failed to query the checker about a specific version. - VersionGetBody { addr: Address, version: i64, err: Box }, + VersionGetBody { addr: Address, version: u64, err: Box }, /// Failed to query the user which version to select. VersionSelect { err: dialoguer::Error }, /// Failed to get the versions on the remote checker. VersionsGet { addr: Address, err: Box }, + /// Failed to serialize a given policy version. + VersionSerialize { version: u64, err: serde_json::Error }, } impl Display for Error { fn fmt(&self, f: &mut Formatter<'_>) -> FResult { use Error::*; match self { ActiveVersionGet { addr, .. } => write!(f, "Failed to get active version of checker '{addr}'"), + IllegalInput { path, got } => { + write!(f, "eFLINT JSON file {:?} is not a list of phrases or a phrases request (got: {:?})", path.display(), got) + }, InputDeserialize { path, raw, .. } => { write!(f, "Failed to deserialize contents of '{}' to JSON\n\nRaw value:\n{}\n", path.display(), BlockFormatter::new(raw)) }, InputRead { path, .. } => write!(f, "Failed to read input file '{}'", path.display()), - InputToJson { path, .. } => write!(f, "Failed to compile input file '{}' to eFLINT JSON", path.display()), + InputString { what, .. } => write!(f, "Failed to ask you {what}"), InvalidPolicyActivated { addr, got, expected } => write!( f, "Checker '{}' activated wrong policy; it says it activated {}, but we requested to activate {}", @@ -149,6 +160,7 @@ impl Display for Error { VersionGetBody { addr, version, .. } => write!(f, "Failed to get policy body of policy '{version}' stored in checker '{addr}'"), VersionSelect { .. } => write!(f, "Failed to ask you which version to make active"), VersionsGet { addr, .. } => write!(f, "Failed to get policy versions stored in checker '{addr}'"), + VersionSerialize { version, .. } => write!(f, "Failed to serialize policy {version}"), } } } @@ -157,9 +169,10 @@ impl error::Error for Error { use Error::*; match self { ActiveVersionGet { err, .. } => Some(&**err), + IllegalInput { .. } => None, InputDeserialize { err, .. } => Some(err), InputRead { err, .. } => Some(err), - InputToJson { err, .. } => Some(err), + InputString { err, .. } => Some(err), InvalidPolicyActivated { .. } => None, MissingExtension { .. } => None, NodeConfigIncompatible { .. } => None, @@ -179,6 +192,7 @@ impl error::Error for Error { VersionGetBody { err, .. } => Some(&**err), VersionSelect { err } => Some(err), VersionsGet { err, .. } => Some(&**err), + VersionSerialize { err, .. 
} => Some(err), } } } @@ -244,14 +258,14 @@ fn resolve_token(node_config_path: impl AsRef, worker: &mut Option { debug!("Using generated token '{token}'"); *worker = Some(worker_cfg); Ok(token) }, - Err(err) => Err(Error::TokenGenerate { secret: worker_cfg.paths.policy_expert_secret, err }), + Err(err) => Err(Error::TokenGenerate { secret: worker_cfg.paths.policy_store_secret, err }), } } } @@ -272,10 +286,10 @@ fn resolve_token(node_config_path: impl AsRef, worker: &mut Option, worker: &mut Option, mut address: AddressOpt) -> Result { // Resolve the address port if needed - if address.port().is_none() { + if address.port.is_none() { // Resolve the worker and store the port of the checker let worker_cfg: WorkerConfig = resolve_worker_config(&node_config_path, worker.take())?; - *address.port_mut() = Some(worker_cfg.services.chk.address.port()); + address.port = Some(worker_cfg.services.chk.store); *worker = Some(worker_cfg); } @@ -283,6 +297,57 @@ fn resolve_addr_opt(node_config_path: impl AsRef, worker: &mut Option Result { + info!("Retrieving context from checker '{address}'"); + + // Prepare the request + let url: String = format!("http://{}{}", address, GET_CONTEXT_PATH.instantiated_path::(None)); + debug!("Building GET-request to '{url}'..."); + let client: Client = Client::new(); + let req: Request = match client.request(GET_CONTEXT_PATH.method, &url).bearer_auth(token).build() { + Ok(req) => req, + Err(err) => return Err(Error::RequestBuild { kind: "GET", addr: url, err }), + }; + + // Send it + debug!("Sending request to '{url}'..."); + let res: Response = match client.execute(req).await { + Ok(res) => res, + Err(err) => return Err(Error::RequestSend { kind: "GET", addr: url, err }), + }; + debug!("Server responded with {}", res.status()); + if !res.status().is_success() { + return Err(Error::RequestFailure { addr: url, code: res.status(), response: res.text().await.ok() }); + } + + // Attempt to parse the result as a list of policy versions + match res.text().await { + Ok(body) => { + // Log the full response first + debug!("Response:\n{}\n", BlockFormatter::new(&body)); + // Parse it as a [`Policy`] + match serde_json::from_str::(&body) { + Ok(body) => Ok(body.context), + Err(err) => Err(Error::ResponseDeserialize { addr: url, raw: body, err }), + } + }, + Err(err) => Err(Error::ResponseDownload { addr: url, err }), + } +} + /// Helper function that pulls a specific version's body from a checker. /// /// # Arguments @@ -291,18 +356,18 @@ fn resolve_addr_opt(node_config_path: impl AsRef, worker: &mut Option Result { +async fn get_version_body_from_checker(address: &Address, token: &str, version: u64) -> Result { info!("Retrieving policy '{version}' from checker '{address}'"); // Prepare the request - let url: String = format!("http://{}/{}", address, POLICY_API_GET_VERSION.1(version)); + let url: String = format!("http://{}{}", address, GET_VERSION_CONTENT_PATH.instantiated_path([version])); debug!("Building GET-request to '{url}'..."); let client: Client = Client::new(); - let req: Request = match client.request(POLICY_API_GET_VERSION.0, &url).bearer_auth(token).build() { + let req: Request = match client.request(GET_VERSION_CONTENT_PATH.method, &url).bearer_auth(token).build() { Ok(req) => req, Err(err) => return Err(Error::RequestBuild { kind: "GET", addr: url, err }), }; @@ -340,18 +405,18 @@ async fn get_version_body_from_checker(address: &Address, token: &str, version: /// - `token`: The token used for authenticating the checker. 
/// /// # Returns -/// A list of versions found on the remote checkers. +/// A map of versions to metadata found on the remote checkers. /// /// # Errors /// This function may error if we failed to reach the checker, failed to authenticate or failed to download/parse the result. -async fn get_versions_on_checker(address: &Address, token: &str) -> Result, Error> { +async fn get_versions_on_checker(address: &Address, token: &str) -> Result, Error> { info!("Retrieving policies on checker '{address}'"); // Prepare the request - let url: String = format!("http://{}/{}", address, POLICY_API_LIST_POLICIES.1); + let url: String = format!("http://{}{}", address, GET_VERSIONS_PATH.instantiated_path::(None)); debug!("Building GET-request to '{url}'..."); let client: Client = Client::new(); - let req: Request = match client.request(POLICY_API_LIST_POLICIES.0, &url).bearer_auth(token).build() { + let req: Request = match client.request(GET_VERSIONS_PATH.method, &url).bearer_auth(token).build() { Ok(req) => req, Err(err) => return Err(Error::RequestBuild { kind: "GET", addr: url, err }), }; @@ -373,8 +438,8 @@ async fn get_versions_on_checker(address: &Address, token: &str) -> Result Ok(body), + match serde_json::from_str::(&body) { + Ok(body) => Ok(body.versions), Err(err) => Err(Error::ResponseDeserialize { addr: url, raw: body, err }), } }, @@ -389,18 +454,18 @@ async fn get_versions_on_checker(address: &Address, token: &str) -> Result Result, Error> { +async fn get_active_version_on_checker(address: &Address, token: &str) -> Result, Error> { info!("Retrieving active policy of checker '{address}'"); // Prepare the request - let url: String = format!("http://{}/{}", address, POLICY_API_GET_ACTIVE_VERSION.1); + let url: String = format!("http://{}{}", address, GET_ACTIVE_VERSION_PATH.instantiated_path::(None)); debug!("Building GET-request to '{url}'..."); let client: Client = Client::new(); - let req: Request = match client.request(POLICY_API_GET_ACTIVE_VERSION.0, &url).bearer_auth(token).build() { + let req: Request = match client.request(GET_ACTIVE_VERSION_PATH.method, &url).bearer_auth(token).build() { Ok(req) => req, Err(err) => return Err(Error::RequestBuild { kind: "GET", addr: url, err }), }; @@ -425,8 +490,8 @@ async fn get_active_version_on_checker(address: &Address, token: &str) -> Result // Log the full response first debug!("Response:\n{}\n", BlockFormatter::new(&body)); // Parse it as a [`Policy`] - match serde_json::from_str(&body) { - Ok(body) => Ok(body), + match serde_json::from_str::(&body) { + Ok(body) => Ok(body.version), Err(err) => Err(Error::ResponseDeserialize { addr: url, raw: body, err }), } }, @@ -434,12 +499,40 @@ async fn get_active_version_on_checker(address: &Address, token: &str) -> Result } } + + +/// Prompts to supply a string with an optional value. +/// +/// # Arguments +/// - `what`: Some abstract description of what is prompted. Only used for error handling. +/// - `question`: The question to ask the input of. +/// - `default`: A default value to give, if any. +/// +/// # Returns +/// The information selected by the user. May be the `default` if given and the user selected it. +/// +/// # Errors +/// This function may error if we failed to query the user. 
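All of these checker helpers share one request shape; stripped of the typed `*_PATH` constants and the error bookkeeping, the pattern is (a hypothetical standalone form, using only `reqwest` calls that appear above):

```rust
use reqwest::{Client, Request, Response};

/// Build an authenticated GET, send it, and surface non-2xx statuses as errors.
async fn get_with_bearer(url: &str, token: &str) -> Result<String, reqwest::Error> {
    let client: Client = Client::new();
    let req: Request = client.get(url).bearer_auth(token).build()?;
    let res: Response = client.execute(req).await?;
    res.error_for_status()?.text().await
}
```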
+fn prompt_user_string(what: &'static str, question: impl Into, default: Option<&str>) -> Result { + // Ask the user using dialoguer, then return that version + let theme = ColorfulTheme::default(); + let mut prompt = dialoguer::Input::with_theme(&theme).with_prompt(question).show_default(default.is_some()); + if let Some(default) = default { + prompt = prompt.default(default.to_string()); + } + match prompt.interact() { + Ok(res) => Ok(res), + Err(err) => Err(Error::InputString { what, err }), + } +} + /// Prompts the user to select one of the given list of versions. /// /// # Arguments -/// - `address`: The address (or some other identifier) of the checker/source we retrieved the policy from. Only used for debugging. +/// - `question`: The question to ask the input of. /// - `active_version`: If there is any active version. /// - `versions`: The list of versions to select from. +/// - `exit`: Whether to provide an exit button to the prompt or not. /// /// # Returns /// An index into the given list, which is what the user selected. If `exit` is true, then this may return [`None`] when selected. @@ -447,36 +540,40 @@ async fn get_active_version_on_checker(address: &Address, token: &str) -> Result /// # Errors /// This function may error if we failed to query the user. fn prompt_user_version( - address: impl Into
<String>
, - active_version: Option, - versions: &[PolicyVersion], + question: impl Into, + active_version: Option, + versions: &HashMap, exit: bool, -) -> Result, Error> { +) -> Result, Error> { + // First: go by order + let mut ids: Vec = versions.keys().cloned().collect(); + ids.sort(); + // Preprocess the versions into neat representations let mut sversions: Vec = Vec::with_capacity(versions.len() + 1); - for (i, version) in versions.iter().enumerate() { - // Discard it if it has no version - if version.version.is_none() { - return Err(Error::PolicyWithoutVersion { addr: address.into(), which: format!("{i}th") }); - } + for id in &ids { + // Get the version for this ID + let version: &Metadata = versions.get(id).unwrap(); // See if it's selected to print either bold or not - let mut line: String = if version.version == active_version { style("Version ").bold().to_string() } else { "Version ".into() }; - line.push_str(&style(version.version.unwrap()).bold().green().to_string()); - if version.version == active_version { + let mut line: String = if active_version == Some(version.version) { style("Version ").bold().to_string() } else { "Version ".into() }; + line.push_str(&style(version.version).bold().green().to_string()); + if active_version == Some(version.version) { line.push_str( &style(format!( - " (created at {}, by {})", - version.created_at.format("%H:%M:%S %d-%m-%Y"), - version.creator.as_deref().unwrap_or("") + " (created at {}, by {} ({}))", + version.created.format("%H:%M:%S %d-%m-%Y"), + version.creator.name, + version.creator.id )) .to_string(), ); } else { line.push_str(&format!( - " (created at {}, by {})", - version.created_at.format("%H:%M:%S %d-%m-%Y"), - version.creator.as_deref().unwrap_or("") + " (created at {}, by {} ({}))", + version.created.format("%H:%M:%S %d-%m-%Y"), + version.creator.name, + version.creator.id )); } @@ -490,15 +587,11 @@ fn prompt_user_version( } // Ask the user using dialoguer, then return that version - match dialoguer::Select::with_theme(&ColorfulTheme::default()) - .with_prompt("Which version do you want to make active?") - .items(&sversions) - .interact() - { + match dialoguer::Select::with_theme(&ColorfulTheme::default()).with_prompt(question).items(&sversions).interact() { Ok(idx) => { if !exit || idx < versions.len() { // Exit wasn't selected - Ok(Some(idx)) + Ok(Some(ids[idx])) } else { // Exit was selected Ok(None) @@ -516,8 +609,8 @@ fn prompt_user_version( /// Defines supported reasoners in the checker. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum TargetReasoner { - /// It's an eFLINT JSON Specification reasoner - EFlintJson(EFlintJsonVersion), + /// It's an eFLINT Haskell reasoner + EFlintHaskell, } impl TargetReasoner { /// Returns the string identifier of the reasoner that can be send to a checker. @@ -526,31 +619,7 @@ impl TargetReasoner { /// A [`String`] that the checker uses to verify if the sent policy matches the backend. pub fn id(&self) -> String { match self { - Self::EFlintJson(_) => "eflint-json".into(), - } - } - - /// Returns the string identifier of the reasoner version that can be send to a checker. - /// - /// # Returns - /// A [`String`] version that the checker uses to verify if the sent policy matches the backend. - pub fn version(&self) -> String { - match self { - Self::EFlintJson(v) => v.to_string(), - } - } -} - -/// Defines supported [`TargetReasoner::EFlintJson`] specification versions. 
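The `ids` detour at the top of the new `prompt_user_version` above exists because `HashMap` iteration order is unspecified; sorting the keys first gives the menu a stable, ascending order. In isolation (with a toy value type standing in for `Metadata`):

```rust
use std::collections::HashMap;

fn sorted_ids<V>(versions: &HashMap<u64, V>) -> Vec<u64> {
    let mut ids: Vec<u64> = versions.keys().copied().collect();
    ids.sort();
    ids
}

fn main() {
    let versions: HashMap<u64, &str> = HashMap::from([(2, "two"), (1, "one"), (3, "three")]);
    // The prompt indexes back into the map with these sorted IDs.
    assert_eq!(sorted_ids(&versions), vec![1, 2, 3]);
}
```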
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] -pub enum EFlintJsonVersion { - /// Specification version 0.1.0 (see ). - V0_1_0, -} -impl Display for EFlintJsonVersion { - fn fmt(&self, f: &mut Formatter<'_>) -> FResult { - match self { - Self::V0_1_0 => write!(f, "0.1.0"), + Self::EFlintHaskell => "eflint-haskell".into(), } } } @@ -567,7 +636,7 @@ impl Display for EFlintJsonVersion { /// - `version`: The version to activate in the checker. Should do some TUI stuff if not given. /// - `address`: The address on which to reach the checker. May be missing a port, to be resolved in the node.yml. /// - `token`: A token used for authentication with the remote checker. If omitted, will attempt to generate one based on the secret file in the node.yml file. -pub async fn activate(node_config_path: PathBuf, version: Option, address: AddressOpt, token: Option) -> Result<(), Error> { +pub async fn activate(node_config_path: PathBuf, version: Option, address: AddressOpt, token: Option) -> Result<(), Error> { info!( "Activating policy{} on checker of node defined by '{}'", if let Some(version) = &version { format!(" version '{version}'") } else { String::new() }, @@ -580,35 +649,34 @@ pub async fn activate(node_config_path: PathBuf, version: Option, address: let address: Address = resolve_addr_opt(&node_config_path, &mut worker, address)?; // Now we resolve the version - let version: i64 = if let Some(version) = version { + let version: u64 = if let Some(version) = version { version } else { // Alrighty; first, pull a list of all available versions from the checker - let mut versions: Vec = match get_versions_on_checker(&address, &token).await { + let versions: HashMap = match get_versions_on_checker(&address, &token).await { Ok(versions) => versions, Err(err) => return Err(Error::VersionsGet { addr: address, err: Box::new(err) }), }; // Then fetch the already active version - let active_version: Option = match get_active_version_on_checker(&address, &token).await { - Ok(version) => version.and_then(|v| v.version.version), + let active_version: Option = match get_active_version_on_checker(&address, &token).await { + Ok(version) => version, Err(err) => return Err(Error::ActiveVersionGet { addr: address, err: Box::new(err) }), }; // Prompt the user to select it - let idx: usize = match prompt_user_version(&address, active_version, &versions, false) { - Ok(Some(idx)) => idx, + match prompt_user_version("Which version do you want to make active?", active_version, &versions, false) { + Ok(Some(id)) => id, Ok(None) => unreachable!(), Err(err) => return Err(Error::PromptVersions { err: Box::new(err) }), - }; - versions.swap_remove(idx).version.unwrap() + } }; debug!("Activating policy version {version}"); // Now build the request and send it - let url: String = format!("http://{}/{}", address, POLICY_API_SET_ACTIVE_VERSION.1); + let url: String = format!("http://{}{}", address, ACTIVATE_PATH.instantiated_path::(None)); debug!("Building PUT-request to '{url}'..."); let client: Client = Client::new(); - let req: Request = match client.request(POLICY_API_SET_ACTIVE_VERSION.0, &url).bearer_auth(token).json(&SetVersionPostModel { version }).build() { + let req: Request = match client.request(ACTIVATE_PATH.method, &url).bearer_auth(token).json(&ActivateRequest { version }).build() { Ok(req) => req, Err(err) => return Err(Error::RequestBuild { kind: "GET", addr: url, err }), }; @@ -624,23 +692,6 @@ pub async fn activate(node_config_path: PathBuf, version: Option, address: return Err(Error::RequestFailure { addr: 
url, code: res.status(), response: res.text().await.ok() }); } - // Attempt to parse the result as a Policy - let res: Policy = match res.text().await { - Ok(body) => { - // Log the full response first - debug!("Response:\n{}\n", BlockFormatter::new(&body)); - // Parse it as a [`Policy`] - match serde_json::from_str(&body) { - Ok(body) => body, - Err(err) => return Err(Error::ResponseDeserialize { addr: url, raw: body, err }), - } - }, - Err(err) => return Err(Error::ResponseDownload { addr: url, err }), - }; - if res.version.version != Some(version) { - return Err(Error::InvalidPolicyActivated { addr: address, got: res.version.version, expected: Some(version) }); - } - // Done! println!("Successfully activated policy {} to checker {}.", style(version).bold().green(), style(address).bold().green(),); Ok(()) @@ -695,6 +746,18 @@ pub async fn add( (input.into(), false) }; + // Query the user for some metadata + debug!("Prompting user (you!) for metadata..."); + let name: String = prompt_user_string( + "for a policy name", + "Provide a descriptive name of the policy", + input.file_name().map(OsStr::to_string_lossy).as_ref().map(Cow::as_ref), + )?; + debug!("Policy name: {name:?}"); + let description: String = + prompt_user_string("for a policy description", "Provide a short description of the policy", Some("A very dope policy"))?; + debug!("Policy description: {description:?}"); + // If the language is not given, resolve it from the file extension let language: PolicyInputLanguage = if let Some(language) = language { debug!("Interpreting input as {language}"); @@ -705,8 +768,6 @@ pub async fn add( // Else, attempt to resolve from the extension if ext == OsStr::new("eflint") { PolicyInputLanguage::EFlint - } else if ext == OsStr::new("json") { - PolicyInputLanguage::EFlintJson } else if from_stdin { return Err(Error::UnspecifiedInputLanguage); } else { @@ -720,48 +781,33 @@ pub async fn add( }; // Read the input file - let (json, target_reasoner): (String, TargetReasoner) = match language { + let (eflint, target_reasoner): (String, TargetReasoner) = match language { PolicyInputLanguage::EFlint => { // We read the eFLINT phrases as-is - debug!("Compiling eFLINT input file '{}' to eFLINT JSON", input.display()); - let mut json: Vec<u8> = Vec::new(); - if let Err(err) = eflint_to_json::compile_async(&input, &mut json, None).await { - return Err(Error::InputToJson { path: input, err }); - } - - // Serialize it to a string - match String::from_utf8(json) { - Ok(json) => (json, TargetReasoner::EFlintJson(EFlintJsonVersion::V0_1_0)), - Err(err) => panic!("{}", trace!(("eflint_to_json::compile_async() did not return valid UTF-8"), err)), - } - }, - PolicyInputLanguage::EFlintJson => { - // Read the file in one go - debug!("Reading eFLINT JSON input file '{}'", input.display()); + debug!("Reading input file {:?}...", input.display()); match tfs::read_to_string(&input).await { - Ok(json) => (json, TargetReasoner::EFlintJson(EFlintJsonVersion::V0_1_0)), - Err(err) => return Err(Error::InputRead { path: input, err }), + Ok(phrases) => (phrases, TargetReasoner::EFlintHaskell), + Err(err) => return Err(Error::InputRead { path: input, err }), } }, }; - // Ensure it is JSON - debug!("Deserializing input as JSON..."); - let json: Box<RawValue> = match serde_json::from_str(&json) { - Ok(json) => json, - Err(err) => return Err(Error::InputDeserialize { path: input, raw: json, err }), - }; + // Ask the checker for the reasoner context + let context:
EFlintHaskellReasonerWithInterfaceContext = get_context_from_checker(&address, &token).await?; // Finally, construct a request for the checker - let url: String = format!("http://{}/{}", address, POLICY_API_ADD_VERSION.1); + let url: String = format!("http://{}{}", address, ADD_VERSION_PATH.instantiated_path::(None)); debug!("Building POST-request to '{url}'..."); let client: Client = Client::new(); - let contents: AddPolicyPostModel = AddPolicyPostModel { - version_description: "".into(), - description: None, - content: vec![PolicyContentPostModel { reasoner: target_reasoner.id(), reasoner_version: target_reasoner.version(), content: json }], + let contents: AddVersionRequest = AddVersionRequest { + metadata: AttachedMetadata { + name, + description, + language: format!("{}-{}", target_reasoner.id(), base16ct::lower::encode_string(&context.base_policy_hash)), + }, + contents: eflint, }; - let req: Request = match client.request(POLICY_API_ADD_VERSION.0, &url).bearer_auth(token).json(&contents).build() { + let req: Request = match client.request(ADD_VERSION_PATH.method, &url).bearer_auth(token).json(&contents).build() { Ok(req) => req, Err(err) => return Err(Error::RequestBuild { kind: "POST", addr: url, err }), }; @@ -778,7 +824,7 @@ pub async fn add( } // Log the response body - let body: Policy = match res.text().await { + let body: AddVersionResponse = match res.text().await { Ok(body) => { // Log the full response first debug!("Response:\n{}\n", BlockFormatter::new(&body)); @@ -793,10 +839,10 @@ pub async fn add( // Done! println!( - "Successfully added policy {} to checker {}{}.", + "Successfully added policy {} to checker {} as version {}.", style(if from_stdin { "".into() } else { input.display().to_string() }).bold().green(), style(address).bold().green(), - if let Some(version) = body.version.version { format!(" as version {}", style(version).bold().green()) } else { String::new() } + style(body.version).bold().green() ); Ok(()) } @@ -821,34 +867,43 @@ pub async fn list(node_config_path: PathBuf, address: AddressOpt, token: Option< let address: Address = resolve_addr_opt(&node_config_path, &mut worker, address)?; // Send the request to the reasoner to fetch the active versions - let mut versions: Vec = match get_versions_on_checker(&address, &token).await { + let versions: HashMap = match get_versions_on_checker(&address, &token).await { Ok(versions) => versions, Err(err) => return Err(Error::VersionsGet { addr: address, err: Box::new(err) }), }; // Then fetch the already active version - let active_version: Option = match get_active_version_on_checker(&address, &token).await { - Ok(version) => version.and_then(|v| v.version.version), + let active_version: Option = match get_active_version_on_checker(&address, &token).await { + Ok(version) => version, Err(err) => return Err(Error::ActiveVersionGet { addr: address, err: Box::new(err) }), }; // Enter a loop where we let the user decide for themselves loop { // Display them to the user, with name, to select the policy they want to see more info about - let idx: usize = match prompt_user_version(&address, active_version, &versions, true) { + let version: u64 = match prompt_user_version("Select a version to inspect:", active_version, &versions, true) { Ok(Some(idx)) => idx, - Ok(None) => break, + Ok(None) => return Ok(()), Err(err) => return Err(Error::PromptVersions { err: Box::new(err) }), }; - let version: i64 = versions.swap_remove(idx).version.unwrap(); // Attempt to pull this version from the remote - let _version: Policy = 
match get_version_body_from_checker(&address, &token, version).await { - Ok(version) => version, + let contents: Value = match get_version_body_from_checker(&address, &token, version).await { + Ok(contents) => contents, Err(err) => return Err(Error::VersionGetBody { addr: address, version, err: Box::new(err) }), }; - } - // TODO: Finish this. The idea is show a particular version to the user, then re-enter the loop until they quit - // (empty version, as above) - todo!(); + // Render it + let md: &Metadata = versions.get(&version).unwrap(); + println!("Policy {} ({})", style(format!("{:?}", md.attached.name)).bold().green(), style(md.version).bold()); + println!(" For {}", style(format!("{:?}", md.attached.language)).bold()); + println!(" By {} ({})", style(format!("{:?}", md.creator.name)).bold(), style(format!("{:?}", md.creator.id)).bold()); + println!(" At {}", style(DateTime::::from(md.created).format("%Y-%m-%d %H:%M:%S")).bold()); + println!(" {:?}", md.attached.description); + println!("{}", "-".repeat(80)); + if let Err(err) = serde_json::to_writer_pretty(std::io::stdout(), &contents) { + return Err(Error::VersionSerialize { version, err }); + } + println!("{}", "-".repeat(80)); + println!(); + } } diff --git a/brane-ctl/src/spec.rs b/brane-ctl/src/spec.rs index 143d5d31..311eb869 100644 --- a/brane-ctl/src/spec.rs +++ b/brane-ctl/src/spec.rs @@ -4,7 +4,7 @@ // Created: // 21 Nov 2022, 17:27:52 // Last edited: -// 08 Feb 2024, 17:08:25 +// 01 May 2025, 10:43:08 // Auto updated? // Yes // @@ -249,15 +249,12 @@ where pub enum PolicyInputLanguage { /// It's human-friendly eFLINT EFlint, - /// It's machine-friendly eFLINT JSON. - EFlintJson, } impl Display for PolicyInputLanguage { fn fmt(&self, f: &mut Formatter<'_>) -> FResult { use PolicyInputLanguage::*; match self { EFlint => write!(f, "eFLINT"), - EFlintJson => write!(f, "eFLINT JSON"), } } } @@ -267,7 +264,6 @@ impl FromStr for PolicyInputLanguage { fn from_str(s: &str) -> Result { match s { "eflint" => Ok(Self::EFlint), - "eflint-json" => Ok(Self::EFlintJson), raw => Err(PolicyInputLanguageParseError::Unknown { raw: raw.into() }), } } @@ -427,19 +423,19 @@ pub enum GenerateNodeSubcommand { /// Custom hash file path. #[clap( long, - default_value = "$CONFIG/policy_deliberation_secret.json", - help = "The location of the `policy_deliberation_secret.json` file that is used to verify authentication on the deliberation endpoint \ - in the checker. Use '$CONFIG' to reference the value given by --config-path." + default_value = "$CONFIG/policy_delib_secret.json", + help = "The location of the `policy_delib_secret.json` file that is used to verify authentication on the deliberation endpoint in the \ + checker. Use '$CONFIG' to reference the value given by --config-path." )] - policy_deliberation_secret: PathBuf, + policy_delib_secret: PathBuf, /// Custom hash file path. #[clap( long, - default_value = "$CONFIG/policy_expert_secret.json", - help = "The location of the `policy_expert_secret.json` file that is used to verify authentication on the policy expert endpoint in the \ + default_value = "$CONFIG/policy_store_secret.json", + help = "The location of the `policy_store_secret.json` file that is used to verify authentication on the policy store endpoint in the \ checker. Use '$CONFIG' to reference the value given by --config-path." 
)] - policy_expert_secret: PathBuf, + policy_store_secret: PathBuf, /// Custom audit log path (optional) #[clap( long, @@ -520,7 +516,10 @@ pub enum GenerateNodeSubcommand { job_port: u16, /// The address on which to launch the checker service. #[clap(long, default_value = "50053", help = "The port on which the local checker service is available.")] - chk_port: u16, + chk_delib_port: u16, + /// The address on which to launch the checker service's storage API. + #[clap(long, default_value = "50054", help = "The port on which the storage API of the local checker service is available.")] + chk_store_port: u16, /// The port of the proxy service. #[clap(short, long, default_value = "50050", help = "The port on which the local proxy service is available.")] prx_port: u16, diff --git a/brane-ctl/src/upgrade.rs b/brane-ctl/src/upgrade.rs index 31cc0e27..ca68f77b 100644 --- a/brane-ctl/src/upgrade.rs +++ b/brane-ctl/src/upgrade.rs @@ -4,7 +4,7 @@ // Created: // 03 Jul 2023, 13:01:31 // Last edited: -// 07 Mar 2024, 09:54:40 +// 29 Apr 2025, 13:51:21 // Auto updated? // Yes // @@ -21,11 +21,12 @@ use std::fs::{self, DirEntry}; use std::path::{Path, PathBuf}; use std::str::FromStr as _; -use brane_cfg::node::WorkerUsecase; +use brane_cfg::node::{DoublePrivateService, WorkerUsecase}; use brane_shr::input::input; use console::style; use log::{debug, info, warn}; use serde::Serialize; +use specifications::address::Host; use specifications::version::Version; use crate::old_configs::v1_0_0; @@ -399,18 +400,18 @@ pub fn node(path: impl Into, dry_run: bool, overwrite: bool, version: V name: central.names.api, address: Address::from_str(¢ral.services.api.to_string()).unwrap(), bind: central.ports.api, - external_address: Address::Hostname(format!("http://{hostname}"), central.services.api.port()), + external_address: Address::hostname(format!("http://{hostname}"), central.services.api.port()), }, drv: PublicService { - address: Address::Hostname(format!("grpc://{}", central.names.drv), central.ports.drv.port()), + address: Address::hostname(format!("grpc://{}", central.names.drv), central.ports.drv.port()), name: central.names.drv, bind: central.ports.drv, - external_address: Address::Hostname(format!("grpc://{hostname}"), central.ports.drv.port()), + external_address: Address::hostname(format!("grpc://{hostname}"), central.ports.drv.port()), }, #[allow(unreachable_code)] plr: PrivateService { name: central.names.plr, address: unimplemented!(), bind: unimplemented!() }, prx: PrivateOrExternalService::Private(PrivateService { - address: Address::Hostname(format!("http://{}", cfg.names.prx), cfg.ports.prx.port()), + address: Address::hostname(format!("http://{}", cfg.names.prx), cfg.ports.prx.port()), name: cfg.names.prx, bind: cfg.ports.prx, }), @@ -435,8 +436,8 @@ pub fn node(path: impl Into, dry_run: bool, overwrite: bool, version: V backend: worker.paths.backend, policy_database: worker.paths.policies, - policy_deliberation_secret: "NOT YET IMPLEMENTED".into(), - policy_expert_secret: "NOT YET IMPLEMENTED".into(), + policy_delib_secret: "NOT YET IMPLEMENTED".into(), + policy_store_secret: "NOT YET IMPLEMENTED".into(), policy_audit_log: None, proxy: Some(proxy_path), @@ -451,21 +452,26 @@ pub fn node(path: impl Into, dry_run: bool, overwrite: bool, version: V name: worker.names.reg, address: Address::from_str(&worker.services.reg.to_string()).unwrap(), bind: worker.ports.reg, - external_address: Address::Hostname(format!("https://{hostname}"), worker.services.reg.port()), + external_address: 
Address::hostname(format!("https://{hostname}"), worker.services.reg.port()), }, job: PublicService { - address: Address::Hostname(format!("grpc://{}", worker.names.job), worker.ports.job.port()), + address: Address::hostname(format!("grpc://{}", worker.names.job), worker.ports.job.port()), name: worker.names.job, bind: worker.ports.job, - external_address: Address::Hostname(format!("https://{hostname}"), worker.ports.job.port()), + external_address: Address::hostname(format!("https://{hostname}"), worker.ports.job.port()), }, - chk: PrivateService { - name: worker.names.chk, - address: Address::from_str(&worker.services.chk.to_string()).unwrap(), - bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), worker.services.chk.port()).into(), + chk: DoublePrivateService { + name: worker.names.chk, + host: match &worker.services.chk { + v1_0_0::Address::Ipv4(addr, _) => Host::IPv4(*addr), + v1_0_0::Address::Ipv6(addr, _) => Host::IPv6(*addr), + v1_0_0::Address::Hostname(name, _) => Host::Name(name.clone()), + }, + delib: worker.services.chk.port(), + store: worker.services.chk.port() + 1, }, prx: PrivateOrExternalService::Private(PrivateService { - address: Address::Hostname(format!("http://{}", cfg.names.prx), cfg.ports.prx.port()), + address: Address::hostname(format!("http://{}", cfg.names.prx), cfg.ports.prx.port()), name: cfg.names.prx, bind: cfg.ports.prx, }), diff --git a/brane-ctl/src/wizard.rs b/brane-ctl/src/wizard.rs index 1ad256ae..26240474 100644 --- a/brane-ctl/src/wizard.rs +++ b/brane-ctl/src/wizard.rs @@ -4,7 +4,7 @@ // Created: // 01 Jun 2023, 12:43:20 // Last edited: -// 07 Mar 2024, 09:54:57 +// 14 Nov 2024, 14:58:50 // Auto updated? // Yes // @@ -346,9 +346,9 @@ pub fn query_proxy_node_config() -> Result { services: node::ProxyServices { prx: node::PublicService { name: "brane-prx".into(), - address: Address::Hostname("test.com".into(), 42), + address: Address::hostname("test.com", 42), bind: std::net::SocketAddr::V4(std::net::SocketAddrV4::new(std::net::Ipv4Addr::new(0, 0, 0, 0), 0)), - external_address: Address::Hostname("test.com".into(), 42), + external_address: Address::hostname("test.com", 42), }, }, }), diff --git a/brane-drv/Cargo.toml b/brane-drv/Cargo.toml index 2264cb46..ee39a89d 100644 --- a/brane-drv/Cargo.toml +++ b/brane-drv/Cargo.toml @@ -16,6 +16,7 @@ enum-debug.workspace = true env_logger = "0.11.0" error-trace.workspace = true log = "0.4.22" +prost = "0.12.0" # rdkafka = { version = "0.31", features = ["cmake-build"] } serde_json = "1.0.120" serde_json_any_key = "2.0.0" @@ -27,7 +28,7 @@ tonic = "0.12.0" # Workspace dependencies reqwest = { workspace = true } -brane-ast = { path = "../brane-ast" } +policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor" } brane-cfg = { path = "../brane-cfg" } brane-exe = { path = "../brane-exe" } brane-prx = { path = "../brane-prx" } diff --git a/brane-drv/src/check.rs b/brane-drv/src/check.rs index 6350f27b..bcf43c59 100644 --- a/brane-drv/src/check.rs +++ b/brane-drv/src/check.rs @@ -4,7 +4,7 @@ // Created: // 06 Feb 2024, 11:46:14 // Last edited: -// 08 Feb 2024, 14:39:13 +// 02 May 2025, 15:09:05 // Auto updated? 
// Yes // @@ -15,20 +15,20 @@ use std::error; use std::fmt::{Display, Formatter, Result as FResult}; -use brane_ast::Workflow; -use brane_ast::ast::Edge; -use brane_ast::func_id::FunctionId; use brane_cfg::infra::{InfraFile, InfraLocation}; -use brane_exe::pc::ProgramCounter; use brane_shr::formatters::BlockFormatter; use enum_debug::EnumDebug as _; use log::{debug, info}; +use policy_reasoner::spec::reasonerconn::ReasonerResponse; +use policy_reasoner::spec::reasons::ManyReason; use reqwest::{Client, Request, Response, StatusCode}; -use serde_json::Value; use specifications::address::Address; +use specifications::checking::deliberation::{CheckResponse, CheckTaskRequest, CheckTransferRequest, CheckWorkflowRequest, Prost}; use specifications::data::{AvailabilityKind, DataName, PreprocessKind}; -use specifications::registering::{CheckTransferReply, CheckTransferRequest}; -use specifications::working::{self, JobServiceClient}; +use specifications::pc::ProgramCounter; +use specifications::wir::func_id::FunctionId; +use specifications::wir::{Edge, Workflow}; +use specifications::working::JobServiceClient; use tokio::task::JoinHandle; @@ -151,11 +151,11 @@ impl error::Error for Error { /// /// # Errors /// This future may error if it failed to send the request. -async fn request_workflow(checker: String, address: Address, id: String, sworkflow: String) -> RequestOutput { +async fn request_workflow(checker: String, address: Address, id: String, workflow: Workflow) -> RequestOutput { info!("Spawning workflow-validation request to validate workflow '{id}' with checker '{checker}'"); // Create the request - let req: working::CheckWorkflowRequest = working::CheckWorkflowRequest { use_case: "central".into(), workflow: sworkflow.clone() }; + let req: CheckWorkflowRequest = CheckWorkflowRequest { usecase: "central".into(), workflow: workflow.clone() }; // Connect to the worker debug!("[workflow '{id}' -> '{checker}'] Connecting to worker '{address}'..."); @@ -166,14 +166,17 @@ async fn request_workflow(checker: String, address: Address, id: String, sworkfl // Send the request debug!("[workflow '{id}' -> '{checker}'] Sending CheckRequest to worker '{address}'..."); - let res: working::CheckReply = match client.check_workflow(req).await { - Ok(res) => res.into_inner(), + let res: CheckResponse> = match client.check_workflow(Prost::::new(req)).await { + Ok(res) => res.into_inner().into_inner(), Err(err) => return Err(Error::WorkerCheck { domain: checker, addr: address, err }), }; // Evaluate the worker's response - debug!("[workflow '{id}' -> '{checker}'] Worker '{address}' replied with {}", if res.verdict { "ALLOW" } else { "DENY" }); - if res.verdict { Ok(None) } else { Ok(Some((checker, res.reasons))) } + debug!( + "[workflow '{id}' -> '{checker}'] Worker '{address}' replied with {}", + if matches!(res.verdict, ReasonerResponse::Success) { "ALLOW" } else { "DENY" } + ); + if let ReasonerResponse::Violated(reasons) = res.verdict { Ok(Some((checker, reasons.into_iter().collect()))) } else { Ok(None) } } /// The future that sends a request to assert a dataset transfer's permission. @@ -191,16 +194,12 @@ async fn request_workflow(checker: String, address: Address, id: String, sworkfl /// /// # Errors /// This future may error if it failed to send the request. 
-async fn request_transfer(checker: String, address: Address, id: String, vworkflow: Value, task: ProgramCounter, data: DataName) -> RequestOutput { +async fn request_transfer(checker: String, address: Address, id: String, workflow: Workflow, task: ProgramCounter, data: DataName) -> RequestOutput { info!("Spawning task-execute request to validate task '{task}' in workflow '{id}' with checker '{checker}'"); // Create the request let url: String = format!("{address}/{}/check/{}", if data.is_data() { "data" } else { "results" }, data.name()); - let req: CheckTransferRequest = CheckTransferRequest { - use_case: "central".into(), - workflow: vworkflow, - task: Some((if task.is_main() { None } else { Some(task.func_id.id() as u64) }, task.edge_idx as u64)), - }; + let req: CheckTransferRequest = CheckTransferRequest { usecase: "central".into(), workflow, task: Some(task), input: data.name().into() }; // Create the request debug!("[task '{id}' -> '{checker}'] Connecting to worker '{address}'..."); @@ -225,14 +224,17 @@ async fn request_transfer(checker: String, address: Address, id: String, vworkfl Ok(res) => res, Err(err) => return Err(Error::RegistryResponseDownload { domain: checker, addr: address, err }), }; - let res: CheckTransferReply = match serde_json::from_str(&res) { + let res: CheckResponse> = match serde_json::from_str(&res) { Ok(res) => res, Err(err) => return Err(Error::RegistryResponseParse { domain: checker, addr: address, raw: res, err }), }; // Evaluate the worker's response - debug!("[task '{id}' -> '{checker}'] Worker '{address}' replied with {}", if res.verdict { "ALLOW" } else { "DENY" }); - if res.verdict { Ok(None) } else { Ok(Some((checker, res.reasons))) } + debug!( + "[task '{id}' -> '{checker}'] Worker '{address}' replied with {}", + if matches!(res.verdict, ReasonerResponse::Success) { "ALLOW" } else { "DENY" } + ); + if let ReasonerResponse::Violated(reasons) = res.verdict { Ok(Some((checker, reasons.into_iter().collect()))) } else { Ok(None) } } /// The future that sends a request to assert a task execution's permission. @@ -249,12 +251,11 @@ async fn request_transfer(checker: String, address: Address, id: String, vworkfl /// /// # Errors /// This future may error if it failed to send the request. 
-async fn request_execute(checker: String, address: Address, id: String, sworkflow: String, task: ProgramCounter) -> RequestOutput { +async fn request_execute(checker: String, address: Address, id: String, workflow: Workflow, task: ProgramCounter) -> RequestOutput { info!("Spawning task-execute request to validate task '{task}' in workflow '{id}' with checker '{checker}'"); // Create the request - let req: working::CheckTaskRequest = - working::CheckTaskRequest { use_case: "central".into(), workflow: sworkflow.clone(), task_id: serde_json::to_string(&task).unwrap() }; + let req: CheckTaskRequest = CheckTaskRequest { usecase: "central".into(), workflow, task }; // Connect to the worker debug!("[task '{id}' -> '{checker}'] Connecting to worker '{address}'..."); @@ -265,14 +266,17 @@ async fn request_execute(checker: String, address: Address, id: String, sworkflo // Send the request debug!("[task '{id}' -> '{checker}'] Sending CheckTaskRequest to worker '{address}'..."); - let res: working::CheckReply = match client.check_task(req).await { - Ok(res) => res.into_inner(), + let res: CheckResponse> = match client.check_task(Prost::::new(req)).await { + Ok(res) => res.into_inner().into_inner(), Err(err) => return Err(Error::WorkerCheck { domain: checker, addr: address, err }), }; // Evaluate the worker's response - debug!("[task '{id}' -> '{checker}'] Worker '{address}' replied with {}", if res.verdict { "ALLOW" } else { "DENY" }); - if res.verdict { Ok(None) } else { Ok(Some((checker, res.reasons))) } + debug!( + "[task '{id}' -> '{checker}'] Worker '{address}' replied with {}", + if matches!(res.verdict, ReasonerResponse::Success) { "ALLOW" } else { "DENY" } + ); + if let ReasonerResponse::Violated(reasons) = res.verdict { Ok(Some((checker, reasons.into_iter().collect()))) } else { Ok(None) } } @@ -285,8 +289,6 @@ async fn request_execute(checker: String, address: Address, id: String, sworkflo /// # Arguments /// - `infra`: An [`InfraFile`] that determines all workers known to us. /// - `workflow`: The [`Workflow`] to generate requests for. -/// - `vworkflow`: An already serialized, yet still abstract-as-JSON counterpart to `workflow`. -/// - `sworkflow`: An already (fully) serialized counterpart to `workflow`. /// - `pc`: A [`ProgramCounter`] that denotes which edge we're investigating. /// - `breakpoint`: An optional [`ProgramCounter`] that, when given, will force termination once `pc` is the same. /// - `handles`: The list of [`JoinHandle`]s on which to push new ones for every request we find. 
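With the pre-serialized `vworkflow`/`sworkflow` copies gone, `traverse_and_request` now threads only the structured `Workflow` through its recursion. For readers following along, the hunks below boil down to the traversal pattern sketched here; this is a minimal sketch in which a toy `Edge` enum and plain `usize` indices stand in for Brane's real `Edge` and `ProgramCounter`, and spawning checker requests is reduced to recording the visited index:

// A minimal sketch of breakpoint-bounded edge traversal. The toy `Edge` enum
// below is illustrative only; the real WIR `Edge` has many more variants.
enum Edge {
    Linear { next: usize },
    Branch { true_next: usize, false_next: Option<usize>, merge: Option<usize> },
    Stop,
}

/// Walks `edges` from `pc` onwards, recording every index it visits, and halts
/// at the optional (exclusive) `breakpoint` or at a `Stop` edge.
fn traverse(edges: &[Edge], mut pc: usize, breakpoint: Option<usize>, visited: &mut Vec<usize>) {
    loop {
        // Force termination once we reach the breakpoint (or walk off the end)
        if Some(pc) == breakpoint || pc >= edges.len() {
            return;
        }
        visited.push(pc);
        match &edges[pc] {
            Edge::Linear { next } => pc = *next,
            Edge::Branch { true_next, false_next, merge } => {
                // Recurse into both branches, each bounded by the merge point...
                traverse(edges, *true_next, *merge, visited);
                if let Some(false_next) = false_next {
                    traverse(edges, *false_next, *merge, visited);
                }
                // ...then continue after the merge ourselves, if there is one
                match merge {
                    Some(merge) => pc = *merge,
                    None => return,
                }
            },
            Edge::Stop => return,
        }
    }
}

In the real function, visiting a node additionally pushes `request_execute` (and, for input datasets, `request_transfer`) futures onto `handles` before jumping to the next edge, exactly as the hunks below show.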
@@ -296,8 +298,6 @@ async fn request_execute(checker: String, address: Address, id: String, sworkflo fn traverse_and_request( infra: &InfraFile, workflow: &Workflow, - vworkflow: &Value, - sworkflow: &String, mut pc: ProgramCounter, breakpoint: Option, handles: &mut Vec<(String, JoinHandle)>, @@ -372,7 +372,7 @@ fn traverse_and_request( location.clone(), info.registry.clone(), workflow.id.clone(), - vworkflow.clone(), + workflow.clone(), pc, dataname.clone(), )), @@ -393,7 +393,7 @@ fn traverse_and_request( None => return Err(Error::UnknownExecutor { id: workflow.id.clone(), node: pc, domain: at.clone() }), }; handles - .push((at.clone(), tokio::spawn(request_execute(at.clone(), info.delegate.clone(), workflow.id.clone(), sworkflow.clone(), pc)))); + .push((at.clone(), tokio::spawn(request_execute(at.clone(), info.delegate.clone(), workflow.id.clone(), workflow.clone(), pc)))); // Alright done continue pc = pc.jump(*next); @@ -407,10 +407,10 @@ fn traverse_and_request( Branch { true_next, false_next, merge } => { // Recurse into the true next - traverse_and_request(infra, workflow, vworkflow, sworkflow, pc.jump(*true_next), merge.map(|m| pc.jump(m)), handles)?; + traverse_and_request(infra, workflow, pc.jump(*true_next), merge.map(|m| pc.jump(m)), handles)?; // Recurse into the false next, if any if let Some(false_next) = false_next { - traverse_and_request(infra, workflow, vworkflow, sworkflow, pc.jump(*false_next), merge.map(|m| pc.jump(m)), handles)?; + traverse_and_request(infra, workflow, pc.jump(*false_next), merge.map(|m| pc.jump(m)), handles)?; } // Continue with the merge, if any @@ -424,7 +424,7 @@ fn traverse_and_request( Parallel { branches, merge } => { // Recurse into each branch for b in branches { - traverse_and_request(infra, workflow, vworkflow, sworkflow, pc.jump(*b), Some(pc.jump(*merge)), handles)?; + traverse_and_request(infra, workflow, pc.jump(*b), Some(pc.jump(*merge)), handles)?; } pc = pc.jump(*merge); continue; @@ -436,9 +436,9 @@ fn traverse_and_request( Loop { cond, body, next } => { // Recurse into the condition - traverse_and_request(infra, workflow, vworkflow, sworkflow, pc.jump(*cond), Some(pc.jump(*body - 1)), handles)?; + traverse_and_request(infra, workflow, pc.jump(*cond), Some(pc.jump(*body - 1)), handles)?; // Recurse into the body - traverse_and_request(infra, workflow, vworkflow, sworkflow, pc.jump(*body), Some(pc.jump(*cond)), handles)?; + traverse_and_request(infra, workflow, pc.jump(*body), Some(pc.jump(*cond)), handles)?; // Continue with next if let Some(next) = next { pc = pc.jump(*next); @@ -479,26 +479,16 @@ fn traverse_and_request( /// /// Request failure must be checked at join time. 
pub fn spawn_requests(infra: &InfraFile, workflow: &Workflow) -> Result)>, Error> { - // Serialize the workflow once - let vworkflow: Value = match serde_json::to_value(workflow) { - Ok(swf) => swf, - Err(err) => return Err(Error::WorkflowSerialize { id: workflow.id.clone(), err }), - }; - let sworkflow: String = match serde_json::to_string(&vworkflow) { - Ok(swf) => swf, - Err(err) => return Err(Error::WorkflowSerialize { id: workflow.id.clone(), err }), - }; - // Spawn the workflow-global requests for every checker let mut handles: Vec<(String, JoinHandle)> = Vec::with_capacity(4 * infra.len()); for (name, info) in infra { - handles.push((name.clone(), tokio::spawn(request_workflow(name.clone(), info.delegate.clone(), workflow.id.clone(), sworkflow.clone())))); + handles.push((name.clone(), tokio::spawn(request_workflow(name.clone(), info.delegate.clone(), workflow.id.clone(), workflow.clone())))); } // Delegate to a recursive function that traverses the workflow that does the other two types - traverse_and_request(infra, workflow, &vworkflow, &sworkflow, ProgramCounter::start(), None, &mut handles)?; + traverse_and_request(infra, workflow, ProgramCounter::start(), None, &mut handles)?; for id in workflow.funcs.keys() { - traverse_and_request(infra, workflow, &vworkflow, &sworkflow, ProgramCounter::start_of(*id), None, &mut handles)?; + traverse_and_request(infra, workflow, ProgramCounter::start_of(*id), None, &mut handles)?; } // Done diff --git a/brane-drv/src/handler.rs b/brane-drv/src/handler.rs index 70cab82a..a3482ee4 100644 --- a/brane-drv/src/handler.rs +++ b/brane-drv/src/handler.rs @@ -4,7 +4,7 @@ // Created: // 12 Sep 2022, 16:18:11 // Last edited: -// 07 Mar 2024, 14:20:06 +// 02 May 2025, 15:15:55 // Auto updated? // Yes // @@ -17,7 +17,6 @@ use std::str::FromStr; use std::sync::Arc; use std::time::Instant; -use brane_ast::Workflow; use brane_cfg::info::Info; use brane_cfg::infra::InfraFile; use brane_cfg::node::{CentralConfig, NodeConfig, NodeSpecificConfig}; @@ -31,6 +30,7 @@ use error_trace::{ErrorTrace as _, trace}; use log::{debug, error, info}; use specifications::driving::{CheckReply, CheckRequest, CreateSessionReply, CreateSessionRequest, DriverService, ExecuteReply, ExecuteRequest}; use specifications::profiling::ProfileReport; +use specifications::wir::Workflow; use tokio::sync::mpsc; use tokio::task::JoinHandle; use tokio_stream::wrappers::ReceiverStream; diff --git a/brane-drv/src/planner.rs b/brane-drv/src/planner.rs index 67cf7106..c920c42a 100644 --- a/brane-drv/src/planner.rs +++ b/brane-drv/src/planner.rs @@ -4,7 +4,7 @@ // Created: // 25 Oct 2022, 11:35:00 // Last edited: -// 08 Feb 2024, 17:27:11 +// 29 Apr 2025, 14:00:54 // Auto updated? 
// Yes // @@ -14,7 +14,6 @@ /***** LIBRARY *****/ -use brane_ast::Workflow; use brane_tsk::errors::PlanError; use brane_tsk::spec::{AppId, TaskId}; use log::debug; @@ -23,6 +22,7 @@ use serde_json::Value; use specifications::address::Address; use specifications::planning::{PlanningDeniedReply, PlanningReply, PlanningRequest}; use specifications::profiling::ProfileScopeHandle; +use specifications::wir::Workflow; /***** LIBRARY *****/ @@ -58,7 +58,7 @@ impl InstancePlanner { // Populate a "PlanningRequest" with that (i.e., just populate a future record with the string) debug!("Sending request..."); let remote = prof.time(format!("workflow '{task_id}' on brane-plr")); - let url: String = format!("{plr}/plan"); + let url: String = format!("http://{plr}/plan"); let client: Client = Client::new(); let req: Request = client.post(&url).body(sreq).build().map_err(|source| PlanError::PlanningRequest { id: workflow.id.clone(), diff --git a/brane-drv/src/vm.rs b/brane-drv/src/vm.rs index 94ead93f..39a21b96 100644 --- a/brane-drv/src/vm.rs +++ b/brane-drv/src/vm.rs @@ -4,7 +4,7 @@ // Created: // 27 Oct 2022, 10:14:26 // Last edited: -// 07 Mar 2024, 14:18:12 +// 02 May 2025, 15:10:41 // Auto updated? // Yes // @@ -19,13 +19,9 @@ use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; use async_trait::async_trait; -use brane_ast::Workflow; -use brane_ast::func_id::FunctionId; -use brane_ast::locations::Location; use brane_cfg::info::Info as _; use brane_cfg::infra::InfraFile; use brane_cfg::node::{CentralConfig, NodeConfig, NodeSpecificConfig}; -use brane_exe::pc::ProgramCounter; use brane_exe::spec::{TaskInfo, VmPlugin}; use brane_exe::{Error as VmError, FullValue, RunState, Vm}; use brane_prx::client::ProxyClient; @@ -36,7 +32,11 @@ use log::{debug, info, warn}; use serde_json_any_key::MapIterToJson; use specifications::address::Address; use specifications::data::{AccessKind, DataName, PreprocessKind}; +use specifications::pc::ProgramCounter; use specifications::profiling::ProfileScopeHandle; +use specifications::wir::Workflow; +use specifications::wir::func_id::FunctionId; +use specifications::wir::locations::Location; use specifications::working::TransferRegistryTar; use specifications::{driving as driving_grpc, working as working_grpc}; use tokio::sync::mpsc::Sender; @@ -88,7 +88,7 @@ impl VmPlugin for InstancePlugin { // Resolve the location to an address (and get the proxy while we have a lock anyway) let disk = prof.time("File loading"); - let (proxy, delegate_address, workflow): (Arc, Address, String) = { + let (proxy, delegate_address, workflow): (Arc, String, String) = { // Load the node config file to get the path to... 
let state: RwLockReadGuard = global.read().unwrap(); @@ -96,7 +96,7 @@ impl VmPlugin for InstancePlugin { match state.infra.as_ref().unwrap().get(&loc) { Some(info) => ( state.proxy.clone(), - info.delegate.clone(), + format!("grpc://{}", info.delegate), state.workflow.clone().unwrap_or_else(|| panic!("Workflow state not injected by the time the workflow is being executed")), ), None => { @@ -170,14 +170,14 @@ impl VmPlugin for InstancePlugin { // Resolve the location to an address (and get the proxy and the workflow while we have a lock anyway) let disk = prof.time("File loading"); - let (proxy, delegate_address, workflow): (Arc, Address, String) = { + let (proxy, delegate_address, workflow): (Arc, String, String) = { let state: RwLockReadGuard = global.read().unwrap(); // Resolve to an address and return that with the other addresses ( state.proxy.clone(), match state.infra.as_ref().unwrap().get(info.location) { - Some(info) => info.delegate.clone(), + Some(info) => format!("grpc://{}", info.delegate), None => { return Err(ExecuteError::UnknownLocationError { loc: info.location.clone() }); }, @@ -435,12 +435,12 @@ impl VmPlugin for InstancePlugin { // Resolve the location to an address (and get the proxy client while at it) let disk = prof.time("File loading"); - let (proxy, delegate_address): (Arc, Address) = { + let (proxy, delegate_address): (Arc, String) = { let state: RwLockReadGuard = global.read().unwrap(); // Resolve to an address match state.infra.as_ref().unwrap().get(loc) { - Some(info) => (state.proxy.clone(), info.delegate.clone()), + Some(info) => (state.proxy.clone(), format!("grpc://{}", info.delegate)), None => { return Err(CommitError::UnknownLocationError { loc: loc.clone() }); }, diff --git a/brane-dsl/src/data_type.rs b/brane-dsl/src/data_type.rs index cd505ca1..a4819df2 100644 --- a/brane-dsl/src/data_type.rs +++ b/brane-dsl/src/data_type.rs @@ -4,7 +4,7 @@ // Created: // 23 Aug 2022, 20:34:33 // Last edited: -// 17 Jan 2023, 15:14:01 +// 14 Nov 2024, 17:09:58 // Auto updated? // Yes // @@ -17,6 +17,7 @@ use std::mem::discriminant; use std::str::FromStr; use serde::{Deserialize, Serialize}; +use specifications::wir::builtins::{BuiltinClasses, BuiltinFunctions}; /***** LIBRARY *****/ @@ -54,6 +55,27 @@ impl Display for FunctionSignature { } } +impl From for FunctionSignature { + #[inline] + fn from(value: BuiltinFunctions) -> Self { Self::from(&value) } +} +impl From<&BuiltinFunctions> for FunctionSignature { + #[inline] + fn from(value: &BuiltinFunctions) -> Self { + match value { + BuiltinFunctions::Print => Self::new(vec![DataType::String], DataType::Void), + BuiltinFunctions::PrintLn => Self::new(vec![DataType::String], DataType::Void), + + BuiltinFunctions::Len => Self::new(vec![DataType::Array(Box::new(DataType::Any))], DataType::Integer), + + BuiltinFunctions::CommitResult => Self::new( + vec![DataType::String, DataType::Class(BuiltinClasses::IntermediateResult.name().into())], + DataType::Class(BuiltinClasses::Data.name().into()), + ), + } + } +} + /// Defines a Class' signature (i.e., unique type information). diff --git a/brane-dsl/src/spec.rs b/brane-dsl/src/spec.rs index ee11da30..c91f2444 100644 --- a/brane-dsl/src/spec.rs +++ b/brane-dsl/src/spec.rs @@ -4,7 +4,7 @@ // Created: // 10 Aug 2022, 14:03:04 // Last edited: -// 16 Nov 2022, 16:40:19 +// 14 Nov 2024, 16:07:35 // Auto updated? // Yes // @@ -237,66 +237,6 @@ impl FromStr for Language { -/// Defines merge strategies for the parallel statements. 
-#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Hash, Serialize)] -pub enum MergeStrategy { - /// Take the value that arrived first. The statement will already return as soon as this statement is in, not the rest. - First, - /// Take the value that arrived first. The statement will still block until all values returned. - FirstBlocking, - /// Take the value that arrived last. - Last, - - /// Add all the resulting values together. This means that they must all be numeric. - Sum, - /// Multiple all the resulting values together. This means that they must all be numeric. - Product, - - /// Take the largest value. Use on booleans to get an 'OR'-effect (i.e., it returns true iff there is at least one true). - Max, - /// Take the smallest value. Use on booleans to get an 'AND'-effect (i.e., it returns false iff there is at least one false). - Min, - - /// Returns all values as an Array. - All, - - /// No merge strategy needed - None, -} - -impl From<&str> for MergeStrategy { - #[inline] - fn from(value: &str) -> Self { - match value.to_lowercase().as_str() { - "first" => Self::First, - "first*" => Self::FirstBlocking, - "last" => Self::Last, - - "+" | "sum" => Self::Sum, - "*" | "product" => Self::Product, - - "max" => Self::Max, - "min" => Self::Min, - - "all" => Self::All, - - _ => Self::None, - } - } -} - -impl From<&String> for MergeStrategy { - #[inline] - fn from(value: &String) -> Self { Self::from(value.as_str()) } -} - -impl From for MergeStrategy { - #[inline] - fn from(value: String) -> Self { Self::from(value.as_str()) } -} - - - // /// Defines the supported data types in BraneScript/Bakery. // #[derive(Clone, Debug, Eq, PartialEq, Hash)] // pub enum DataType { diff --git a/brane-exe/Cargo.toml b/brane-exe/Cargo.toml index f7b6ea0e..cdf30622 100644 --- a/brane-exe/Cargo.toml +++ b/brane-exe/Cargo.toml @@ -16,7 +16,6 @@ enum-debug.workspace = true futures = "0.3.24" lazy_static = "1.4.0" log = "0.4.22" -num-traits = "0.2.18" serde = "1.0.204" serde_json = "1.0.120" thiserror = "2.0.0" diff --git a/brane-exe/src/dummy.rs b/brane-exe/src/dummy.rs index ba4dce66..ef6e377a 100644 --- a/brane-exe/src/dummy.rs +++ b/brane-exe/src/dummy.rs @@ -4,7 +4,7 @@ // Created: // 13 Sep 2022, 16:43:11 // Last edited: -// 31 Jan 2024, 11:36:37 +// 02 May 2025, 15:11:13 // Auto updated? // Yes // @@ -18,16 +18,16 @@ use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard}; use async_trait::async_trait; -use brane_ast::ast::{Edge, SymTable}; -use brane_ast::locations::Location; -use brane_ast::{DataType, Workflow}; use log::info; use specifications::data::{AccessKind, AvailabilityKind, DataName}; +use specifications::pc::ProgramCounter; use specifications::profiling::ProfileScopeHandle; +use specifications::wir::data_type::DataType; +use specifications::wir::locations::Location; +use specifications::wir::{Edge, SymTable, Workflow}; pub use crate::errors::DummyVmError as Error; use crate::errors::VmError; -use crate::pc::ProgramCounter; use crate::spec::{CustomGlobalState, RunState, TaskInfo, VmPlugin}; use crate::value::FullValue; use crate::vm::Vm; diff --git a/brane-exe/src/errors.rs b/brane-exe/src/errors.rs index ab4a65c2..a76c31a7 100644 --- a/brane-exe/src/errors.rs +++ b/brane-exe/src/errors.rs @@ -4,7 +4,7 @@ // Created: // 26 Aug 2022, 18:01:09 // Last edited: -// 31 Jan 2024, 11:36:09 +// 14 Nov 2024, 17:21:49 // Auto updated? 
// Yes // @@ -15,14 +15,14 @@ use std::error::Error; use std::path::PathBuf; -use brane_ast::func_id::FunctionId; -use brane_ast::{DataType, MergeStrategy}; use console::style; use enum_debug::EnumDebug as _; use specifications::data::DataName; +use specifications::pc::ProgramCounter; use specifications::version::Version; - -use crate::pc::ProgramCounter; +use specifications::wir::data_type::DataType; +use specifications::wir::func_id::FunctionId; +use specifications::wir::merge_strategy::MergeStrategy; /***** HELPER FUNCTIONS *****/ diff --git a/brane-exe/src/frame_stack.rs b/brane-exe/src/frame_stack.rs index d1bdeef2..6b93c1b7 100644 --- a/brane-exe/src/frame_stack.rs +++ b/brane-exe/src/frame_stack.rs @@ -4,7 +4,7 @@ // Created: // 12 Sep 2022, 10:45:50 // Last edited: -// 16 Jan 2024, 15:23:14 +// 14 Nov 2024, 17:21:53 // Auto updated? // Yes // @@ -16,12 +16,12 @@ use std::collections::HashMap; use std::sync::Arc; -use brane_ast::DataType; -use brane_ast::ast::{SymTable, VarDef}; -use brane_ast::func_id::FunctionId; +use specifications::pc::ProgramCounter; +use specifications::wir::data_type::DataType; +use specifications::wir::func_id::FunctionId; +use specifications::wir::{SymTable, VarDef}; pub use crate::errors::FrameStackError as Error; -use crate::pc::ProgramCounter; use crate::value::Value; diff --git a/brane-exe/src/lib.rs b/brane-exe/src/lib.rs index 2c03567a..1f48d8c8 100644 --- a/brane-exe/src/lib.rs +++ b/brane-exe/src/lib.rs @@ -4,7 +4,7 @@ // Created: // 09 Sep 2022, 11:54:53 // Last edited: -// 16 Jan 2024, 15:15:43 +// 14 Nov 2024, 17:21:35 // Auto updated? // Yes // @@ -18,7 +18,6 @@ pub mod spec; // pub mod vtable; pub mod dummy; pub mod frame_stack; -pub mod pc; pub mod stack; pub mod thread; pub mod value; diff --git a/brane-exe/src/spec.rs b/brane-exe/src/spec.rs index 1eed1e15..ea6e92f4 100644 --- a/brane-exe/src/spec.rs +++ b/brane-exe/src/spec.rs @@ -4,7 +4,7 @@ // Created: // 26 Aug 2022, 18:26:40 // Last edited: -// 31 Jan 2024, 11:36:19 +// 14 Nov 2024, 17:22:02 // Auto updated? // Yes // @@ -18,15 +18,15 @@ use std::error::Error; use std::path::Path; use std::sync::{Arc, RwLock}; -use brane_ast::ast::SymTable; -use brane_ast::locations::Location; use specifications::data::{AccessKind, DataName, PreprocessKind}; use specifications::package::Capability; +use specifications::pc::ProgramCounter; use specifications::profiling::ProfileScopeHandle; use specifications::version::Version; +use specifications::wir::SymTable; +use specifications::wir::locations::Location; use crate::frame_stack::FrameStack; -use crate::pc::ProgramCounter; use crate::value::FullValue; diff --git a/brane-exe/src/thread.rs b/brane-exe/src/thread.rs index 5ca1aacf..437276df 100644 --- a/brane-exe/src/thread.rs +++ b/brane-exe/src/thread.rs @@ -4,7 +4,7 @@ // Created: // 09 Sep 2022, 13:23:41 // Last edited: -// 23 Jul 2024, 01:31:41 +// 02 May 2025, 15:10:26 // Auto updated? 
// Yes // @@ -19,16 +19,18 @@ use std::path::PathBuf; use std::sync::{Arc, RwLock}; use async_recursion::async_recursion; -use brane_ast::ast::{ClassDef, ComputeTaskDef, Edge, EdgeInstr, FunctionDef, TaskDef}; -use brane_ast::func_id::FunctionId; -use brane_ast::locations::Location; -use brane_ast::spec::{BuiltinClasses, BuiltinFunctions}; -use brane_ast::{DataType, MergeStrategy, Workflow}; use enum_debug::EnumDebug as _; use futures::future::{BoxFuture, FutureExt}; use log::debug; use specifications::data::{AccessKind, AvailabilityKind, DataName}; +use specifications::pc::ProgramCounter; use specifications::profiling::{ProfileScopeHandle, ProfileScopeHandleOwned}; +use specifications::wir::builtins::{BuiltinClasses, BuiltinFunctions}; +use specifications::wir::data_type::DataType; +use specifications::wir::func_id::FunctionId; +use specifications::wir::locations::Location; +use specifications::wir::merge_strategy::MergeStrategy; +use specifications::wir::{ClassDef, ComputeTaskDef, Edge, EdgeInstr, FunctionDef, TaskDef, Workflow}; use tokio::spawn; use tokio::task::JoinHandle; @@ -36,7 +38,6 @@ use crate::dbg_node; use crate::errors::ReturnEdge; pub use crate::errors::VmError as Error; use crate::frame_stack::FrameStack; -use crate::pc::ProgramCounter; use crate::spec::{CustomGlobalState, CustomLocalState, RunState, TaskInfo, VmPlugin}; use crate::stack::Stack; use crate::value::{FullValue, Value}; diff --git a/brane-exe/src/value.rs b/brane-exe/src/value.rs index f4a1b2e3..02a82f87 100644 --- a/brane-exe/src/value.rs +++ b/brane-exe/src/value.rs @@ -4,7 +4,7 @@ // Created: // 20 Sep 2022, 13:44:07 // Last edited: -// 16 Jan 2024, 15:13:18 +// 14 Nov 2024, 17:23:12 // Auto updated? // Yes // @@ -15,13 +15,13 @@ use std::collections::HashMap; use std::fmt::{Display, Formatter, Result as FResult}; -use brane_ast::SymTable; -use brane_ast::data_type::DataType; -use brane_ast::func_id::FunctionId; -use brane_ast::spec::BuiltinClasses; use serde::de::Visitor; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_json::Value as JValue; +use specifications::wir::SymTable; +use specifications::wir::builtins::BuiltinClasses; +use specifications::wir::data_type::DataType; +use specifications::wir::func_id::FunctionId; pub use crate::errors::ValueError as Error; diff --git a/brane-exe/src/vm.rs b/brane-exe/src/vm.rs index e99ddd94..afd51857 100644 --- a/brane-exe/src/vm.rs +++ b/brane-exe/src/vm.rs @@ -4,7 +4,7 @@ // Created: // 12 Sep 2022, 17:41:33 // Last edited: -// 12 Dec 2023, 17:20:22 +// 02 May 2025, 15:10:31 // Auto updated? // Yes // @@ -16,8 +16,8 @@ use std::sync::{Arc, RwLock}; use async_trait::async_trait; -use brane_ast::{SymTable, Workflow}; use specifications::profiling::ProfileScopeHandle; +use specifications::wir::{SymTable, Workflow}; use crate::errors::VmError; use crate::spec::{CustomGlobalState, CustomLocalState, RunState, VmPlugin}; @@ -90,7 +90,7 @@ pub trait Vm { /// - `prof`: The ProfileScope that can be used to provide additional information about the timings of the VM (framework-wise, not user-wise). /// /// # Returns - /// The result if the Workflow returned any. + /// The result if the Workflow returned any, together with the edge that produced it. Only if the workflow is empty will this be [`None`]. 
async fn run>( this: Arc>, snippet: Workflow, diff --git a/brane-job/Cargo.toml b/brane-job/Cargo.toml index 277774b5..2132c7f0 100644 --- a/brane-job/Cargo.toml +++ b/brane-job/Cargo.toml @@ -12,7 +12,6 @@ base64 = "0.22.0" bollard = "0.18.0" chrono = "0.4.35" clap = { version = "4.5.6", features = ["derive","env"] } -deliberation = { git = "https://github.com/braneframework/policy-reasoner" } dotenvy = "0.15.0" enum-debug.workspace = true env_logger = "0.11.0" @@ -34,8 +33,9 @@ tonic = "0.12.0" # Workspace dependencies reqwest = { workspace = true, features = ["json","stream","multipart"] } -brane-ast = { path = "../brane-ast" } +policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor" } brane-cfg = { path = "../brane-cfg" } +brane-chk = { path = "../brane-chk" } brane-exe = { path = "../brane-exe" } brane-prx = { path = "../brane-prx" } brane-shr = { path = "../brane-shr" } diff --git a/brane-job/src/cli.rs b/brane-job/src/cli.rs index d8951b76..55714d77 100644 --- a/brane-job/src/cli.rs +++ b/brane-job/src/cli.rs @@ -11,6 +11,9 @@ pub(crate) struct Cli { /// Whether to keep containers after execution or not. #[clap(long, action, help = "If given, will not remove job containers after removing them.", env = "KEEP_CONTAINERS")] pub(crate) keep_containers: bool, + /// The token to authenticate ourselves with the checker with. + #[clap(long, help = "A token to authenticate to the given Checker service with.", env = "CHECKER_DELIB_TOKEN")] + pub(crate) delib_token: String, /// Node environment metadata store. #[clap( diff --git a/brane-job/src/main.rs b/brane-job/src/main.rs index 94b1ff3f..02281820 100644 --- a/brane-job/src/main.rs +++ b/brane-job/src/main.rs @@ -4,7 +4,7 @@ // Created: // 18 Oct 2022, 13:47:17 // Last edited: -// 14 Jun 2024, 15:14:12 +// 29 Apr 2025, 14:00:59 // Auto updated? // Yes // @@ -69,7 +69,12 @@ async fn main() { // let xenon_endpoint = utilities::ensure_http_schema(&opts.xenon, !opts.debug)?; // Start the JobHandler - let server = match WorkerServer::new(opts.node_config_path, opts.keep_containers, Arc::new(ProxyClient::new(worker.services.prx.address()))) { + let server = match WorkerServer::new( + opts.node_config_path, + opts.keep_containers, + opts.delib_token, + Arc::new(ProxyClient::new(worker.services.prx.address())), + ) { Ok(svr) => svr, Err(err) => { error!("{}", trace!(("Failed to create WorkerServer"), err)); diff --git a/brane-job/src/worker.rs b/brane-job/src/worker.rs index 98d1bfe0..1e6ab97b 100644 --- a/brane-job/src/worker.rs +++ b/brane-job/src/worker.rs @@ -4,7 +4,7 @@ // Created: // 31 Oct 2022, 11:21:14 // Last edited: -// 01 May 2024, 10:39:39 +// 02 May 2025, 15:07:09 // Auto updated? // Yes // @@ -14,27 +14,22 @@ //! execution to publicizing/committing. 
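A note on the checker plumbing in this file: the worker now authenticates at the checker's deliberation API with the pre-issued token wired in above (`--delib-token` / `CHECKER_DELIB_TOKEN`) rather than minting a JWT per request. As a rough sketch of the new request shape — assuming `reqwest` with its `json` feature enabled, and with a placeholder path and body type instead of the real items from `specifications::checking`:

use reqwest::{Client, Method, Request};
use serde::Serialize;

// Hypothetical stand-in for the real CheckTaskRequest body.
#[derive(Serialize)]
struct CheckBody<'a> {
    usecase: &'a str,
}

/// Builds a bearer-authenticated POST to a checker deliberation endpoint.
fn build_check_request(host: &str, delib_port: u16, path: &str, delib_token: &str) -> reqwest::Result<Request> {
    Client::new()
        .request(Method::POST, format!("http://{host}:{delib_port}{path}"))
        .bearer_auth(delib_token)
        .json(&CheckBody { usecase: "central" })
        .build()
}

`assert_task_permission` below follows this pattern, taking the method and path from `CHECK_TASK_PATH` and the authority from the worker's `chk.host` and `chk.delib` settings.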
// +use std::borrow::Cow; use std::collections::{HashMap, HashSet}; use std::error; use std::ffi::OsStr; use std::fmt::{Display, Formatter, Result as FResult}; use std::path::{Path, PathBuf}; -use std::str::FromStr as _; use std::sync::Arc; -use std::time::Duration; use base64::Engine as _; use base64::engine::general_purpose::STANDARD; use bollard::API_DEFAULT_VERSION; -use brane_ast::Workflow; -use brane_ast::ast::{ComputeTaskDef, TaskDef}; -use brane_ast::func_id::FunctionId; -use brane_ast::locations::Location; use brane_cfg::backend::{BackendFile, Credentials}; use brane_cfg::info::Info as _; use brane_cfg::node::{NodeConfig, NodeSpecificConfig, WorkerConfig}; +use brane_chk::workflow::compile::pc_to_id; use brane_exe::FullValue; -use brane_exe::pc::ProgramCounter; use brane_prx::client::ProxyClient; use brane_prx::spec::NewPathRequestTlsOptions; use brane_shr::formatters::BlockFormatter; @@ -45,30 +40,28 @@ use brane_tsk::errors::{AuthorizeError, CommitError, ExecuteError, PreprocessErr use brane_tsk::spec::JobStatus; use brane_tsk::tools::decode_base64; use chrono::Utc; -use deliberation::spec::Verdict; -// use deliberation::spec::ExecuteTaskRequest; use enum_debug::EnumDebug as _; use error_trace::{ErrorTrace as _, trace}; use futures_util::StreamExt; use hyper::body::Bytes; -// use kube::config::Kubeconfig; use log::{debug, error, info, warn}; -use reqwest::{Method, header}; -use serde::{Deserialize, Serialize}; +use policy_reasoner::spec::reasonerconn::ReasonerResponse; +use policy_reasoner::spec::reasons::ManyReason; +use reqwest::Method; use serde_json_any_key::json_to_map; use specifications::address::Address; -// use brane_tsk::k8s::{self, K8sOptions}; -use specifications::checking::{DELIBERATION_API_EXECUTE_TASK, DELIBERATION_API_WORKFLOW}; +use specifications::checking::deliberation::{CHECK_TASK_PATH, CHECK_WORKFLOW_PATH, CheckResponse, CheckTaskRequest, CheckWorkflowRequest, Prost}; use specifications::container::{Image, VolumeBind}; use specifications::data::{AccessKind, AssetInfo, DataName}; use specifications::package::{Capability, PackageIndex, PackageInfo, PackageKind}; +use specifications::pc::ProgramCounter; use specifications::profiling::{ProfileReport, ProfileScopeHandle}; use specifications::registering::DownloadAssetRequest; use specifications::version::Version; -use specifications::working::{ - CheckReply, CheckTaskRequest, CheckWorkflowRequest, CommitReply, CommitRequest, ExecuteReply, ExecuteRequest, JobService, PreprocessReply, - PreprocessRequest, TaskStatus, -}; +use specifications::wir::func_id::FunctionId; +use specifications::wir::locations::Location; +use specifications::wir::{ComputeTaskDef, TaskDef, Workflow}; +use specifications::working::{CommitReply, CommitRequest, ExecuteReply, ExecuteRequest, JobService, PreprocessReply, PreprocessRequest, TaskStatus}; use tokio::fs as tfs; use tokio::io::AsyncWriteExt; use tokio::sync::mpsc::{self, Sender}; @@ -190,39 +183,6 @@ impl error::Error for Error { -/***** HELPER STRUCTURES *****/ -/// Manual copy of the [policy-reasoner](https://github.com/braneframework/policy-reasoner)'s `ExecuteTaskRequest`-struct. -/// -/// This is necessary because, when we pull the dependency directly, we get conflicts because that repository depends on the git version of this repository, meaning its notion of a Workflow is always (practically) outdated. -#[derive(Clone, Debug, Deserialize, Serialize)] -struct PolicyExecuteRequest { - /// Some identifier that allows the policy reasoner to assume a different context. 
- /// - /// Note that not any identifier is accepted. Which are depends on which plugins used. - pub use_case: String, - /// The workflow that is being examined. - pub workflow: Workflow, - /// The ID (i.e., program counter) of the call that we want to authorize. - pub task_id: ProgramCounter, -} - -/// Manual copy of the [policy-reasoner](https://github.com/braneframework/policy-reasoner)'s `WorkflowValidationRequest`-struct. -/// -/// This is necessary because, when we pull the dependency directly, we get conflicts because that repository depends on the git version of this repository, meaning its notion of a Workflow is always (practically) outdated. -#[derive(Clone, Debug, Deserialize, Serialize)] -struct PolicyValidateRequest { - /// Some identifier that allows the policy reasoner to assume a different context. - /// - /// Note that not any identifier is accepted. Which are depends on which plugins used. - pub use_case: String, - /// Workflow definition - pub workflow: Workflow, -} - - - - - /***** AUXILLARY STRUCTURES *****/ /// Helper structure for grouping together task-dependent "constants", but that are not part of the task itself. #[derive(Clone, Debug)] @@ -326,7 +286,7 @@ impl TaskInfo { /// - `worker_cfg`: The configuration for this node's environment. For us, contains the path where we may find certificates and where to download data & result files to. /// - `proxy`: The proxy client we use to proxy the data transfer. /// - `use_case`: A string denoting which use-case (registry) we're using. -/// - `pc`: The ProgramCounter of the edge that provides context for this preprocessing. If omitted, should be interpreted as that the context is retrieving the workflow result instead. +/// - `pc`: The ProgramCounter of the edge that provides context for this preprocessing. If empty, denotes we're talking about a return instead of an input. /// - `workflow`: A [`Workflow`] that is given as context to the registry. /// - `location`: The location to download the tarball from. /// - `dataname`: The name of the dataset to preprocess. @@ -435,7 +395,7 @@ async fn preprocess_transfer_tar_local( // Send a reqwest debug!("Sending download request..."); let download = prof.time("Downloading"); - let url: String = format!("{}/{}/download/{}", address, if dataname.is_data() { "data" } else { "results" }, dataname.name()); + let url: String = format!("https://{}/{}/download/{}", address, if dataname.is_data() { "data" } else { "results" }, dataname.name()); let res = proxy .get_with_body(&url, Some(NewPathRequestTlsOptions { location: location.clone(), use_client_auth: true }), &DownloadAssetRequest { use_case: use_case.into(), @@ -572,6 +532,7 @@ pub async fn preprocess_transfer_tar( /// # Arguments /// - `worker_cfg`: The configuration for this node's environment. For us, contains if and where we should proxy the request through and where we may find the checker. /// - `use_case`: A string denoting which use-case (registry) we're using. +/// - `delib_token`: A token used to access the checker's deliberation API. /// - `workflow`: The workflow to check. /// - `call`: A program counter that identifies which call in the workflow we'll be checkin'. 
///
@@ -583,30 +544,31 @@ pub async fn preprocess_transfer_tar(
 async fn assert_task_permission(
     worker_cfg: &WorkerConfig,
     use_case: &str,
+    delib_token: &str,
     workflow: &Workflow,
     call: ProgramCounter,
 ) -> Result<bool, AuthorizeError> {
-    info!("Checking task '{}' execution permission with checker '{}'...", call, worker_cfg.services.chk.address);
+    info!("Checking task '{}' execution permission with checker '{}'...", call, worker_cfg.services.chk.host);
 
     // Alrighty tighty, let's begin by building the request for the checker
     debug!("Constructing checker request...");
-    let body: PolicyExecuteRequest = PolicyExecuteRequest { use_case: use_case.into(), workflow: workflow.clone(), task_id: call };
+    let body: CheckTaskRequest = CheckTaskRequest { usecase: use_case.into(), workflow: workflow.clone(), task: call };
 
-    // Next, generate a JWT to inject in the request
-    let jwt: String = specifications::policy::generate_policy_token(
-        if let Some(user) = &*workflow.user { user.as_str() } else { "UNKNOWN" },
-        &worker_cfg.name,
-        Duration::from_secs(60),
-        &worker_cfg.paths.policy_deliberation_secret,
-    )
-    .map_err(|source| AuthorizeError::TokenGenerate { secret: worker_cfg.paths.policy_deliberation_secret.clone(), source })?;
+    // // Next, generate a JWT to inject in the request
+    // let jwt: String = specifications::policy::generate_policy_token(
+    //     if let Some(user) = &*workflow.user { user.as_str() } else { "UNKNOWN" },
+    //     &worker_cfg.name,
+    //     Duration::from_secs(60),
+    //     &worker_cfg.paths.policy_delib_secret,
+    // )
+    // .map_err(|source| AuthorizeError::TokenGenerate { secret: worker_cfg.paths.policy_delib_secret.clone(), source })?;
 
     // Prepare the request to send
     let client: reqwest::Client = reqwest::Client::builder().build().map_err(|source| AuthorizeError::ClientBuild { source })?;
-    let addr: String = format!("{}/{}", worker_cfg.services.chk.address, DELIBERATION_API_EXECUTE_TASK.1);
+    let addr: String = format!("http://{}:{}{}", worker_cfg.services.chk.host, worker_cfg.services.chk.delib, CHECK_TASK_PATH.path);
     let req: reqwest::Request = client
-        .request(DELIBERATION_API_EXECUTE_TASK.0, &addr)
-        .header(header::AUTHORIZATION, format!("Bearer {jwt}"))
+        .request(CHECK_TASK_PATH.method, &addr)
+        .bearer_auth(delib_token)
         .json(&body)
         .build()
         .map_err(|source| AuthorizeError::ExecuteRequestBuild { addr: addr.clone(), source })?;
@@ -621,18 +583,18 @@ async fn assert_task_permission(
         return Err(AuthorizeError::ExecuteRequestFailure { addr: addr.clone(), code: res.status(), err: res.text().await.ok() });
     }
     let res: String = res.text().await.map_err(|source| AuthorizeError::ExecuteBodyDownload { addr: addr.clone(), source })?;
-    let res: Verdict =
+    let res: CheckResponse<ManyReason<String>> =
         serde_json::from_str(&res).map_err(|source| AuthorizeError::ExecuteBodyDeserialize { addr: addr.clone(), raw: res, source })?;
 
     // Now match the checker's response
-    match res {
-        Verdict::Allow(_) => {
+    match res.verdict {
+        ReasonerResponse::Success => {
             info!("Checker ALLOWED execution of task {}", call);
             Ok(true)
         },
-        Verdict::Deny(_) => {
-            info!("Checker DENIED execution of task {}", call);
+        ReasonerResponse::Violated(reasons) => {
+            info!("Checker DENIED execution of task {}{}", call, reasons.into_iter().map(|r| format!(" - {r}\n")).collect::<String>());
             Ok(false)
         },
     }
 }
@@ -642,6 +604,7 @@
 ///
 /// # Arguments
 /// -` node_config_path`: The path to a `node.yml` file that defines the environment (such as checker location).
+/// - `delib_token`: Some JWT that we use to authenticate ourselves at the checker.
 /// - `request`: The body of the request, which is either a [`CheckWorkflowRequest`] or a [`CheckTaskRequest`].
 ///
 /// # Returns
 ///
 /// # Errors
 /// This function may error if we failed to read the `node.yml` file or if we failed to contact the checker.
-async fn check_workflow_or_task(node_config_path: &Path, request: CheckRequest) -> Result<Response<CheckReply>, Status> {
-    let (use_case, workflow, task_id): (String, String, Option<String>) = match request {
-        CheckRequest::Workflow(CheckWorkflowRequest { use_case, workflow }) => (use_case, workflow, None),
-        CheckRequest::Task(CheckTaskRequest { use_case, workflow, task_id }) => (use_case, workflow, Some(task_id)),
-    };
-    debug!("Consulting checker to find validity for use-case '{use_case}'");
+async fn check_workflow_or_task(
+    node_config_path: &Path,
+    delib_token: &str,
+    request: CheckRequest,
+) -> Result<Response<Prost<CheckResponse<ManyReason<String>>>>, Status> {
+    debug!("Consulting checker to find validity for use-case {:?}", request.usecase());
 
     // Load the worker config from the node config to setup the profiler
     let worker_cfg: WorkerConfig = match NodeConfig::from_path(node_config_path) {
@@ -673,83 +636,51 @@ async fn check_workflow_or_task(node_config_path: &Path, request: CheckRequest)
     let report =
         ProfileReport::auto_reporting_file("brane-job WorkerServer::check-workflow", format!("brane-job_{}_check-workflow", worker_cfg.name));
 
-    // Attempt to parse the workflow
-    let par = report.time("Parsing");
-    let workflow: Workflow = match serde_json::from_str(&workflow) {
-        Ok(workflow) => workflow,
-        Err(err) => {
-            error!("{}", trace!(("Failed to deserialize workflow"), err));
-            debug!("Workflow:\n{}\n{}\n{}\n", (0..80).map(|_| '-').collect::<String>(), workflow, (0..80).map(|_| '-').collect::<String>());
-            return Err(Status::invalid_argument(format!("{}", trace!(("Failed to deserialize workflow"), err))));
-        },
-    };
-    par.stop();
-
     // Alrighty tighty, let's begin by building the request for the checker
     let send = report.time("Checker request");
     debug!("Constructing checker request...");
-    let (method, url, body): (Method, String, String) = if let Some(task_id) = task_id {
-        // Parse the task ID as a ProgramCounter
-        let pc: ProgramCounter = match ProgramCounter::from_str(&task_id) {
-            Ok(pc) => pc,
-            Err(err) => {
-                debug!("{}", trace!(("Failed to parse '{task_id}' as program counter"), err));
-                return Err(Status::invalid_argument(format!("{}", trace!(("Failed to parse '{task_id}' as program counter"), err))));
-            },
-        };
-
-        // It's a task request
-        (
-            DELIBERATION_API_EXECUTE_TASK.0,
-            format!("{}/{}", worker_cfg.services.chk.address, DELIBERATION_API_EXECUTE_TASK.1),
-            match serde_json::to_string(&PolicyExecuteRequest { use_case: use_case.clone(), workflow: workflow.clone(), task_id: pc }) {
-                Ok(req) => req,
-                Err(err) => {
-                    error!("{}", trace!(("Could not deserialize PolicyExecuteRequest"), err));
-                    return Err(Status::internal("An internal error occurred"));
+    let (method, url, body): (Method, String, String) = match &request {
+        CheckRequest::Workflow(req) => {
+            // It's a workflow request
+            (
+                CHECK_WORKFLOW_PATH.method,
+                format!("http://{}:{}{}", worker_cfg.services.chk.host, worker_cfg.services.chk.delib, CHECK_WORKFLOW_PATH.path),
+                match serde_json::to_string(req) {
+                    Ok(req) => req,
+                    Err(err) => {
+                        error!("{}", trace!(("Could not serialize CheckWorkflowRequest"), err));
+                        return Err(Status::internal("An internal error occurred"));
+                    },
                 },
-            )
-    } else {
-        // It's a workflow request
-        (
-            DELIBERATION_API_WORKFLOW.0,
-            format!("{}/{}", worker_cfg.services.chk.address, DELIBERATION_API_WORKFLOW.1),
-            match serde_json::to_string(&PolicyValidateRequest { use_case: use_case.clone(), workflow: workflow.clone() }) {
-                Ok(req) => req,
-                Err(err) => {
-                    error!("{}", trace!(("Could not deserialize PolicyExecuteRequest"), err));
-                    return Err(Status::internal("An internal error occurred"));
-                },
-            },
-        )
+            )
+        },
+        CheckRequest::Task(req) => {
+            // It's a task request
+            (
+                CHECK_TASK_PATH.method,
+                format!("http://{}:{}{}", worker_cfg.services.chk.host, worker_cfg.services.chk.delib, CHECK_TASK_PATH.path),
+                match serde_json::to_string(req) {
+                    Ok(req) => req,
+                    Err(err) => {
+                        error!("{}", trace!(("Could not serialize CheckTaskRequest"), err));
+                        return Err(Status::internal("An internal error occurred"));
+                    },
+                },
+            )
+        },
     };
 
-    // Next, generate a JWT to inject in the request
-    let jwt: String = specifications::policy::generate_policy_token(
-        if let Some(user) = &*workflow.user { user.as_str() } else { "UNKNOWN" },
-        &worker_cfg.name,
-        Duration::from_secs(60),
-        &worker_cfg.paths.policy_deliberation_secret,
-    )
-    .map_err(|source| {
-        let err = AuthorizeError::TokenGenerate { secret: worker_cfg.paths.policy_deliberation_secret.clone(), source };
-        error!("{}", err.trace());
-        Status::internal("An internal error occurred")
-    })?;
-
     // Prepare the request to send
     let client: reqwest::Client = reqwest::Client::builder().build().map_err(|source| {
         let err = AuthorizeError::ClientBuild { source };
         error!("{}", err.trace());
         Status::internal("An internal error occurred")
     })?;
-    let req: reqwest::Request =
-        client.request(method, &url).header(header::AUTHORIZATION, format!("Bearer {jwt}")).body(body).build().map_err(|source| {
-            let err = AuthorizeError::ExecuteRequestBuild { addr: url.clone(), source };
-            error!("{}", err.trace());
-            Status::internal("An internal error occurred")
-        })?;
+    let req: reqwest::Request = client.request(method, &url).bearer_auth(delib_token).body(body).build().map_err(|source| {
+        let err = AuthorizeError::ExecuteRequestBuild { addr: url.clone(), source };
+        error!("{}", err.trace());
+        Status::internal("An internal error occurred")
    })?;
 
     // Send it
     debug!("Sending request to '{url}'...");
@@ -766,31 +697,34 @@ async fn check_workflow_or_task(node_config_path: &Path, request: CheckRequest)
         error!("{}", err.trace());
         return Err(Status::internal("An internal error occurred"));
     }
-
-    let res: String = res.text().await.map_err(|source| {
-        let err = AuthorizeError::ExecuteBodyDownload { addr: url.clone(), source };
-        error!("{}", err.trace());
-        Status::internal("An internal error occurred")
-    })?;
-
-    let res: Verdict = serde_json::from_str(&res).map_err(|source| {
-        let err = AuthorizeError::ExecuteBodyDeserialize { addr: url.clone(), raw: res, source };
-        error!("{}", err.trace());
-        Status::internal("An internal error occurred")
-    })?;
-
+    let res: String = match res.text().await {
+        Ok(res) => res,
+        Err(source) => {
+            let err = AuthorizeError::ExecuteBodyDownload { addr: url, source };
+            error!("{}", err.trace());
+            return Err(Status::internal("An internal error occurred"));
+        },
+    };
+    let res: CheckResponse<ManyReason<String>> = match serde_json::from_str(&res) {
+        Ok(res) => res,
+        Err(source) => {
+            let err = AuthorizeError::ExecuteBodyDeserialize { addr: url, raw: res, source };
+            error!("{}", err.trace());
+            return Err(Status::internal("An internal error occurred"));
+        },
    };
     send.stop();
 
     // Now match the checker's response
-    match res {
-        Verdict::Allow(_) => {
-            info!("Checker ALLOWED execution of workflow");
-            Ok(Response::new(CheckReply { verdict: true, reasons: vec![] }))
+    match res.verdict {
+        ReasonerResponse::Success => {
+            info!("Checker ALLOWED execution of {}", request.what());
+            Ok(Response::new(Prost::<CheckResponse<ManyReason<String>>>::new(CheckResponse { verdict: ReasonerResponse::Success })))
         },
-        Verdict::Deny(deny) => {
-            info!("Checker DENIED execution of workflow");
-            Ok(Response::new(CheckReply { verdict: false, reasons: deny.reasons_for_denial.unwrap_or_else(Vec::new) }))
+        ReasonerResponse::Violated(reasons) => {
+            info!("Checker DENIED execution of {}{}", request.what(), reasons.iter().map(|r| format!(" - {r}\n")).collect::<String>());
            Ok(Response::new(Prost::<CheckResponse<ManyReason<String>>>::new(CheckResponse { verdict: ReasonerResponse::Violated(reasons) })))
         },
     }
 }
@@ -1080,7 +1014,7 @@ async fn execute_task_local(
     let name: String = match exec.time_fut("spawn overhead", docker::launch(&dinfo, info)).await {
         Ok(name) => name,
         Err(err) => {
-            return Err(JobStatus::CreationFailed(format!("Failed to spawn container: {err}")));
+            return Err(JobStatus::CreationFailed(trace!(("Failed to spawn container"), err).to_string()));
         },
     };
     if let Err(err) = update_client(tx, JobStatus::Created).await {
@@ -1094,7 +1028,7 @@ async fn execute_task_local(
     let (code, stdout, stderr): (i32, String, String) = match exec.time_fut("join overhead", docker::join(dinfo, name, keep_container)).await {
         Ok(name) => name,
         Err(err) => {
-            return Err(JobStatus::CompletionFailed(format!("Failed to join container: {err}")));
+            return Err(JobStatus::CompletionFailed(trace!(("Failed to join container"), err).to_string()));
         },
     };
     total.stop();
@@ -1118,13 +1052,13 @@ async fn execute_task_local(
     let raw: String = match decode_base64(output) {
         Ok(raw) => raw,
         Err(err) => {
-            return Err(JobStatus::DecodingFailed(format!("Failed to decode output ase base64: {err}")));
+            return Err(JobStatus::DecodingFailed(trace!(("Failed to decode output as base64"), err).to_string()));
         },
     };
     let value: FullValue = match serde_json::from_str::<Option<FullValue>>(&raw) {
         Ok(value) => value.unwrap_or(FullValue::Void),
         Err(err) => {
-            return Err(JobStatus::DecodingFailed(format!("Failed to decode output as JSON: {err}")));
+            return Err(JobStatus::DecodingFailed(trace!(("Failed to decode output as JSON"), err).to_string()));
         },
     };
     decode.stop();
@@ -1254,6 +1188,7 @@ async fn execute_task_local(
 /// - `proxy`: The proxy client we use to proxy the data transfer.
 /// - `tx`: The channel to transmit stuff back to the client on.
 /// - `use_case`: A string denoting which use-case (registry) we're using.
+/// - `delib_token`: A JWT used to access the Checker's deliberation API.
 /// - `workflow`: The Workflow that we're executing. Useful for communicating with the eFLINT backend.
 /// - `cinfo`: The CentralNodeInfo that specifies where to find services over at the central node.
 /// - `tinfo`: The TaskInfo that describes the task itself to execute.
@@ -1271,6 +1206,7 @@ async fn execute_task(
     proxy: Arc<ProxyClient>,
     tx: Sender<Result<ExecuteReply, Status>>,
     use_case: &str,
+    delib_token: &str,
     workflow: Workflow,
     cinfo: CentralNodeInfo,
     tinfo: TaskInfo,
@@ -1290,7 +1226,7 @@ async fn execute_task(
 
     /* CALL PREPARATION */
     // Next, query the API for a package index.
     let idx = prof.time("Index retrieval");
-    let index: PackageIndex = match proxy.get_package_index(&format!("{}/graphql", cinfo.api_endpoint)).await {
+    let index: PackageIndex = match proxy.get_package_index(&format!("http://{}/graphql", cinfo.api_endpoint)).await {
         Ok(result) => match result {
             Ok(index) => index,
             Err(source) => {
@@ -1341,7 +1277,7 @@ async fn execute_task(
     let _auth = prof.time("Authorization");

     // First: make sure that the workflow is allowed by the checker
-    match assert_task_permission(worker_cfg, use_case, &workflow, tinfo.pc).await {
+    match assert_task_permission(worker_cfg, use_case, delib_token, &workflow, tinfo.pc).await {
         Ok(true) => {
             debug!("Checker accepted incoming workflow");
             if let Err(err) = update_client(&tx, JobStatus::Authorized).await {
@@ -1614,6 +1550,25 @@ enum CheckRequest {
     /// It's a task validation request
     Task(CheckTaskRequest),
 }
+impl CheckRequest {
+    /// Explains what is being requested.
+    #[inline]
+    fn what(&self) -> Cow<'static, str> {
+        match self {
+            Self::Workflow(_) => Cow::Borrowed("workflow"),
+            Self::Task(t) => Cow::Owned(format!("task {:?} in workflow", pc_to_id(&t.workflow, t.task))),
+        }
+    }
+
+    /// Retrieves the usecase from either request.
+    #[inline]
+    fn usecase(&self) -> &str {
+        match self {
+            Self::Workflow(w) => w.usecase.as_str(),
+            Self::Task(t) => t.usecase.as_str(),
+        }
+    }
+}
@@ -1627,6 +1582,8 @@ pub struct WorkerServer {
     node_config_path: PathBuf,
     /// Whether to remove containers after execution or not (but negated).
     keep_containers: bool,
+    /// The token used to access the checker's deliberation API.
+    delib_token: Arc<String>,
     /// The proxy client to connect to the proxy service with.
     proxy: Arc<ProxyClient>,
@@ -1642,6 +1599,7 @@ impl WorkerServer {
     /// # Arguments
     /// - `node_config_path`: The path to the `node.yml` file that describes this node's environment.
     /// - `keep_containers`: If true, then we will not remove containers after execution (useful for debugging).
+    /// - `delib_token`: A deliberation API JWT to authenticate ourselves at the Checker with.
     /// - `proxy`: The proxy client to connect to the proxy service with.
     ///
     /// # Returns
@@ -1650,7 +1608,7 @@ impl WorkerServer {
     /// # Errors
     /// This function could error if it failed to load the node config file at `node_config_path`.
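The `WorkerServer` above now carries one long-lived deliberation token (`delib_token`), which `new()` below receives, instead of minting a fresh JWT per request. A minimal, self-contained sketch of that bearer-token pattern (the URL and token value are placeholders, not taken from this patch; assumes the `reqwest` and `tokio` crates):

```rust
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // The real token arrives via `--delib-token` / the `CHECKER_DELIB_TOKEN` variable.
    let delib_token: &str = "some-jwt";
    let client = reqwest::Client::new();
    let req = client
        .post("http://checker.example:8080/check") // placeholder address, not from this patch
        .bearer_auth(delib_token)
        .body("{}")
        .build()?;
    // `bearer_auth` sets the standard `Authorization: Bearer <token>` header.
    assert_eq!(req.headers()["authorization"].to_str().unwrap(), format!("Bearer {delib_token}"));
    Ok(())
}
```

The trade-off is that token lifetime and rotation now rest with the operator rather than with the worker's signing secret.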
     #[inline]
-    pub fn new(node_config_path: impl Into<PathBuf>, keep_containers: bool, proxy: Arc<ProxyClient>) -> Result<Self, Error> {
+    pub fn new(node_config_path: impl Into<PathBuf>, keep_containers: bool, delib_token: String, proxy: Arc<ProxyClient>) -> Result<Self, Error> {
         // Read the node config to construct a map of caches
         let node_config_path: PathBuf = node_config_path.into();
         let node: NodeConfig = match NodeConfig::from_path(&node_config_path) {
@@ -1673,7 +1631,7 @@ impl WorkerServer {
             worker.usecases.into_iter().map(|(usecase, reg)| (usecase, DomainRegistryCache::new(reg.api))).collect();

         // OK, return self
-        Ok(Self { node_config_path, keep_containers, proxy, registries: Arc::new(registries) })
+        Ok(Self { node_config_path, keep_containers, delib_token: Arc::new(delib_token), proxy, registries: Arc::new(registries) })
     }
 }
@@ -1681,18 +1639,33 @@ impl WorkerServer {
 impl JobService for WorkerServer {
     type ExecuteStream = ReceiverStream<Result<ExecuteReply, Status>>;

-    async fn check_workflow(&self, request: Request<CheckWorkflowRequest>) -> Result<Response<CheckReply>, Status> {
+    async fn check_workflow(
+        &self,
+        request: Request<Prost<CheckWorkflowRequest>>,
+    ) -> Result<Response<Prost<CheckResponse<ManyReason<String>>>>, Status> {
         info!("Receiving check request for workflow validity...");

+        // Get the request out
+        let req: CheckWorkflowRequest = request.into_inner().into_inner().map_err(|err| {
+            error!("{}", trace!(("Failed to parse incoming request"), err));
+            Status::invalid_argument("Invalid request: could not parse workflow".to_string())
+        })?;
+
         // Pass to the abstracted version
-        check_workflow_or_task(&self.node_config_path, CheckRequest::Workflow(request.into_inner())).await
+        check_workflow_or_task(&self.node_config_path, &self.delib_token, CheckRequest::Workflow(req)).await
     }

-    async fn check_task(&self, request: Request<CheckTaskRequest>) -> Result<Response<CheckReply>, Status> {
+    async fn check_task(&self, request: Request<Prost<CheckTaskRequest>>) -> Result<Response<Prost<CheckResponse<ManyReason<String>>>>, Status> {
         info!("Receiving check request for task validity...");

+        // Get the request out
+        let req: CheckTaskRequest = request.into_inner().into_inner().map_err(|err| {
+            error!("{}", trace!(("Failed to parse incoming request"), err));
+            Status::invalid_argument("Invalid request: could not parse workflow or task ID".to_string())
+        })?;
+
         // Pass to the abstracted version
-        check_workflow_or_task(&self.node_config_path, CheckRequest::Task(request.into_inner())).await
+        check_workflow_or_task(&self.node_config_path, &self.delib_token, CheckRequest::Task(req)).await
     }

     async fn preprocess(&self, request: Request<PreprocessRequest>) -> Result<Response<PreprocessReply>, Status> {
@@ -1961,9 +1934,14 @@ impl JobService for WorkerServer {
         // Now move the rest to a separate task so we can return the start of the stream
         let keep_containers: bool = self.keep_containers;
         let proxy: Arc<ProxyClient> = self.proxy.clone();
+        let delib_token: Arc<String> = self.delib_token.clone();
         tokio::spawn(async move {
             let worker: WorkerConfig = worker;
-            report.nest_fut("execution", |scope| execute_task(&worker, proxy, tx, &use_case, workflow, cinfo, tinfo, keep_containers, scope)).await
+            report
+                .nest_fut("execution", |scope| {
+                    execute_task(&worker, proxy, tx, &use_case, &delib_token, workflow, cinfo, tinfo, keep_containers, scope)
+                })
+                .await
         });

         // Return the stream so the user can get updates
diff --git a/brane-let/Cargo.toml b/brane-let/Cargo.toml
index 4af460e7..480e3760 100644
--- a/brane-let/Cargo.toml
+++ b/brane-let/Cargo.toml
@@ -26,8 +26,6 @@ thiserror = "2.0.0"
 tokio = { version = "1.38.0", features = ["full", "time"] }
 tonic = "0.12.0"
 yaml-rust = { version = "0.8", package = "yaml-rust2" }
-
-brane-ast = { path = "../brane-ast" }
 brane-exe = { path = "../brane-exe" }
 specifications = { path = "../specifications" }

diff --git a/brane-let/src/common.rs b/brane-let/src/common.rs
index ac79b95b..494c45f9 100644
--- a/brane-let/src/common.rs
+++ b/brane-let/src/common.rs
@@ -4,7 +4,7 @@
 // Created:
 //   14 Feb 2022, 14:21:21
 // Last edited:
-//   22 May 2023, 10:23:31
+//   14 Nov 2024, 17:23:41
 // Auto updated?
 //   Yes
 //
@@ -12,11 +12,11 @@
 //! Contains common definitions across all executions.
 //

-use brane_ast::DataType;
 use brane_exe::FullValue;
 use log::debug;
 use specifications::common::Parameter;
 use specifications::package::PackageKind;
+use specifications::wir::data_type::DataType;

 use crate::errors::LetError;

diff --git a/brane-let/src/errors.rs b/brane-let/src/errors.rs
index 4a19e787..d726b5c7 100644
--- a/brane-let/src/errors.rs
+++ b/brane-let/src/errors.rs
@@ -4,7 +4,7 @@
 // Created:
 //   11 Feb 2022, 13:09:23
 // Last edited:
-//   22 May 2023, 10:12:51
+//   14 Nov 2024, 17:23:46
 // Auto updated?
 //   Yes
 //
@@ -14,9 +14,9 @@

 use std::path::PathBuf;

-use brane_ast::DataType;
 use specifications::container::LocalContainerInfoError;
 use specifications::package::PackageKind;
+use specifications::wir::data_type::DataType;


 /***** ERRORS *****/
diff --git a/brane-log/Cargo.toml b/brane-log/Cargo.toml
new file mode 100644
index 00000000..b39fdf31
--- /dev/null
+++ b/brane-log/Cargo.toml
@@ -0,0 +1,32 @@
+[package]
+name = "brane-log"
+edition = "2021"
+rust-version = "1.74.1"
+version.workspace = true
+repository.workspace = true
+authors.workspace = true
+license.workspace = true
+
+[dependencies]
+anyhow = "1.0.0"
+async-stream = "0.3.0"
+bincode = "1.3.0"
+clap = { version = "4.0.24", features = ["derive","env"] }
+dotenvy = "0.15.0"
+env_logger = "0.10.0"
+futures = "0.3.0"
+juniper = "0.15.0"
+juniper_graphql_ws = "0.3.0"
+juniper_warp = { version = "0.7.0", features = ["subscriptions"] }
+log = "0.4.0"
+prost = "0.11.0"
+rdkafka = { version = "0.31.0", features = ["cmake-build"] }
+scylla = "0.2.0"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+time = { version = "0.3.0", features = ["formatting"] }
+tokio = { version = "1.42.0", features = ["full"] }
+warp = "0.3.0"
+
+[lints]
+workspace = true
diff --git a/brane-plr/Cargo.toml b/brane-plr/Cargo.toml
index 041116bf..a1e045ad 100644
--- a/brane-plr/Cargo.toml
+++ b/brane-plr/Cargo.toml
@@ -17,14 +17,14 @@ log = "0.4.22"
 parking_lot = "0.12.1"
 rand = "0.9.0"
 serde_json = "1.0.120"
-tokio = { version = "1.38.0", default-features = false, features = ["macros", "rt", "signal"] }
+tokio = { version = "1.42.0", default-features = false, features = ["macros", "rt", "signal"] }
 tonic = "0.12.0"
 warp = "0.3.2"

 # Workspace dependencies
 reqwest = { workspace = true }

-brane-ast = { path = "../brane-ast" }
+policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor" }
 brane-cfg = { path = "../brane-cfg" }
 brane-prx = { path = "../brane-prx" }
 brane-tsk = { path = "../brane-tsk" }
diff --git a/brane-plr/src/planner.rs b/brane-plr/src/planner.rs
index ad42df65..6e354713 100644
--- a/brane-plr/src/planner.rs
+++ b/brane-plr/src/planner.rs
@@ -4,7 +4,7 @@
 // Created:
 //   25 Oct 2022, 11:35:00
 // Last edited:
-//   08 Feb 2024, 17:33:49
+//   29 Apr 2025, 14:01:38
 // Auto updated?
 //   Yes
 //

@@ -21,9 +21,6 @@ use std::sync::Arc;
 use std::time::{Duration, Instant};

 use async_recursion::async_recursion;
-use brane_ast::Workflow;
-use brane_ast::ast::{ComputeTaskDef, Edge, SymTable, TaskDef};
-use brane_ast::locations::Locations;
 use brane_cfg::info::Info as _;
 use brane_cfg::infra::{InfraFile, InfraLocation};
 use brane_cfg::node::{CentralConfig, NodeConfig};
@@ -32,14 +29,19 @@ use brane_tsk::api::get_data_index;
 use brane_tsk::errors::PlanError;
 use error_trace::trace;
 use log::{debug, error, info};
+use policy_reasoner::spec::reasonerconn::ReasonerResponse;
+use policy_reasoner::spec::reasons::ManyReason;
 use rand::prelude::IteratorRandom;
 use serde_json::Value;
 use specifications::address::Address;
+use specifications::checking::deliberation::{CheckResponse, CheckWorkflowRequest, Prost};
 use specifications::data::{AccessKind, AvailabilityKind, DataIndex, DataName, PreprocessKind};
 use specifications::package::Capability;
 use specifications::planning::{PlanningDeniedReply, PlanningReply, PlanningRequest};
 use specifications::profiling::ProfileReport;
-use specifications::working::{CheckReply, CheckWorkflowRequest, JobServiceClient};
+use specifications::wir::locations::Locations;
+use specifications::wir::{ComputeTaskDef, Edge, SymTable, TaskDef, Workflow};
+use specifications::working::JobServiceClient;
 use warp::http::StatusCode;
 use warp::reject::Rejection;
 use warp::reply::Response;
@@ -160,7 +162,7 @@ async fn plan_edges(
                 let location: &str = &locs.restricted()[0];

                 // Fetch the list of capabilities supported by the planned location
-                let address: String = format!("{api_addr}/infra/capabilities/{location}");
+                let address: String = format!("http://{api_addr}/infra/capabilities/{location}");
                 let res: reqwest::Response = reqwest::get(&address).await.map_err(|source| PlanError::RequestError { address: address.clone(), source })?;
                 if !res.status().is_success() {
@@ -233,7 +235,7 @@ async fn plan_edges(
                         .registry;

                     // Compute the registry access method
-                    let address: String = format!("{registry}/results/download/{iname}");
+                    let address: String = format!("https://{registry}/results/download/{iname}");
                     debug!("Input intermediate result '{}' will be transferred in from '{}'", iname, address);

                     // That's the location where to pull the dataset from
@@ -407,7 +409,7 @@ fn plan_deferred(
                         .registry;

                     // Compute the registry access method
-                    let address: String = format!("{registry}/results/download/{iname}");
+                    let address: String = format!("https://{registry}/results/download/{iname}");
                     debug!("Input intermediate result '{}' will be transferred in from '{}'", iname, address);

                     // That's the location where to pull the dataset from
@@ -515,40 +517,39 @@ fn plan_deferred(
 ///
 /// # Arguments
 /// - `proxy`: A [`ProxyClient`] that we use to connect to the checker.
-/// - `splan`: An (already serialized) planned [`Workflow`] to validate.
+/// - `plan`: A planned [`Workflow`] to validate.
 /// - `location`: The name of the location on which we're resolving (used for debugging purposes only).
 /// - `info`: The addresses where we find this location.
 ///
 /// # Errors
 /// This function errors if either we failed to access any of the checkers, or they denied the workflow.
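Since `validate_workflow_with` below now surfaces the checker's verdict through the `policy-reasoner` types rather than a boolean `CheckReply`, a self-contained model of the verdict shape may help (illustrative only; the real `ReasonerResponse` lives in the `policy-reasoner` crate and is generic over its reasons type):

```rust
// Toy stand-in for `policy_reasoner::spec::reasonerconn::ReasonerResponse`.
enum ReasonerResponse<R> {
    /// The checker found no policy violations.
    Success,
    /// The checker denied the request, carrying zero or more reasons.
    Violated(R),
}

fn main() {
    let verdict: ReasonerResponse<Vec<String>> = ReasonerResponse::Violated(vec!["no consent for dataset".into()]);
    match verdict {
        ReasonerResponse::Success => println!("checker ALLOWED the plan"),
        ReasonerResponse::Violated(reasons) => eprintln!("checker DENIED the plan: {}", reasons.join("; ")),
    }
}
```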
-async fn validate_workflow_with(proxy: &ProxyClient, splan: &str, location: &str, info: &InfraLocation) -> Result<(), PlanError> {
+async fn validate_workflow_with(proxy: &ProxyClient, plan: Workflow, location: &str, info: &InfraLocation) -> Result<(), PlanError> {
     debug!("Consulting checker of '{location}' for plan validity...");
     let message: CheckWorkflowRequest = CheckWorkflowRequest {
         // NOTE: For now, we hardcode the central orchestrator as only "use-case" (registry)
-        use_case: "central".into(),
-        workflow: splan.into(),
+        usecase: "central".into(),
+        workflow: plan,
     };

     // Create the client
     let mut client: JobServiceClient = proxy
-        .connect_to_job(info.delegate.to_string())
+        .connect_to_job(format!("grpc://{}", info.delegate))
         .await
         .map_err(|source| PlanError::ProxyError { source: Box::new(source) })?
         .map_err(|source| PlanError::GrpcConnectError { endpoint: info.delegate.clone(), source })?;

     // Send the request to the job node
-    let response: tonic::Response<CheckReply> = client.check_workflow(message).await.map_err(|source| PlanError::GrpcRequestError {
-        what: "CheckRequest",
-        endpoint: info.delegate.clone(),
-        source,
-    })?;
-    let result: CheckReply = response.into_inner();
+    let response: tonic::Response<Prost<CheckResponse<ManyReason<String>>>> = client
+        .check_workflow(Prost::<CheckWorkflowRequest>::new(message))
+        .await
+        .map_err(|source| PlanError::GrpcRequestError { what: "CheckRequest", endpoint: info.delegate.clone(), source })?;
+    let result: CheckResponse<ManyReason<String>> = response.into_inner().into_inner();

     // Examine if it was OK
-    if !result.verdict {
+    if let ReasonerResponse::Violated(reasons) = result.verdict {
         debug!("Checker of '{location}' DENIES plan");
-        return Err(PlanError::CheckerDenied { domain: location.into(), reasons: result.reasons });
+        return Err(PlanError::CheckerDenied { domain: location.into(), reasons: reasons.into_iter().collect() });
     }

     // Otherwise, OK!
@@ -610,7 +611,7 @@ pub async fn handle(context: Arc<Context>, body: PlanningRequest)
         Ok(dindex) => dindex,
@@ -717,19 +718,15 @@ pub async fn handle(context: Arc<Context>, body: PlanningRequest)
-        Ok(splan) => splan,
-        Err(err) => {
-            return err_response!(internal_error "{}", trace!(("Failed to serialize plan JSON"), err));
-        },
-    };
     ser.stop();

     // Check with the checker(s) if this plan is OK!
debug!("Consulting {} checkers with plan validity...", infra.len()); let val = report.nest("Policy validation"); for (location, info) in infra.iter() { - match val.time_fut(format!("Domain '{}' ({})", location, info.registry), validate_workflow_with(&context.proxy, &splan, location, info)).await + match val + .time_fut(format!("Domain '{}' ({})", location, info.registry), validate_workflow_with(&context.proxy, workflow.clone(), location, info)) + .await { Ok(_) => {}, Err(PlanError::CheckerDenied { domain, reasons }) => { diff --git a/brane-prx/Cargo.toml b/brane-prx/Cargo.toml index ac487667..cc78bbbd 100644 --- a/brane-prx/Cargo.toml +++ b/brane-prx/Cargo.toml @@ -18,7 +18,7 @@ never-say-never = "6.6.666" rustls = "0.21.6" serde = { version = "1.0.204", features = ["derive"] } serde_json = "1.0.120" -socksx = { git = "https://github.com/braneframework/socksx", tag = "v2.0.0" } +socksx = { git = "https://github.com/epi-project/socksx", tag = "v2.0.0" } thiserror = "2.0.0" tokio = { version = "1.38.0", default-features = false, features = ["macros", "rt", "signal"] } tokio-rustls = "0.24.0" diff --git a/brane-prx/src/client.rs b/brane-prx/src/client.rs index 1eb280a4..cf8f72f7 100644 --- a/brane-prx/src/client.rs +++ b/brane-prx/src/client.rs @@ -4,7 +4,7 @@ // Created: // 25 Nov 2022, 15:09:17 // Last edited: -// 15 Jan 2024, 15:16:14 +// 29 Apr 2025, 14:01:46 // Auto updated? // Yes // @@ -34,7 +34,7 @@ use crate::spec::{NewPathRequest, NewPathRequestTlsOptions}; /// /// # Arguments /// - `endpoint`: The proxy service to connect to (hostname + address). -/// - `remote_address`: The remote address to connect to through the proxy. +/// - `remote`: The remote address to connect to through the proxy. /// - `tls`: If given, whether to use TLS and for what location. /// /// # Returns @@ -42,11 +42,15 @@ use crate::spec::{NewPathRequest, NewPathRequestTlsOptions}; /// /// # Errors /// This function errors if we failed to create the port for whatever reason. -async fn create_path(endpoint: &Url, remote: impl Into, tls: &Option) -> Result { +/// +/// # Panics +/// This function panics if the given `endpoint` was not capable of changing its schema (i.e., its invalid). 
+async fn create_path(mut endpoint: Url, remote: impl Into<String>, tls: &Option<NewPathRequestTlsOptions>) -> Result<u16, Error> {
     let remote: String = remote.into();
     debug!("Creating path to '{}' on proxy service '{}'...", remote, endpoint);

     // Prepare the request
+    endpoint.set_scheme("http").unwrap_or_else(|_| panic!("Failed to set \"http\" as scheme"));
     let request: NewPathRequest = NewPathRequest { address: remote.clone(), tls: tls.clone() };

     // Send it with reqwest
@@ -98,7 +102,7 @@ impl ProxyClient {
         // Parse the address as an endpoint
         let endpoint: Url =
-            Url::from_str(&endpoint.to_string()).unwrap_or_else(|err| panic!("Cannot parse given address '{endpoint}' as a URL: {err}"));
+            Url::from_str(&format!("http://{endpoint}")).unwrap_or_else(|err| panic!("Cannot parse given address '{endpoint}' as a URL: {err}"));
         if endpoint.domain().is_none() {
             panic!("Given address '{endpoint}' does not have a domain");
         }
@@ -184,7 +188,7 @@ impl ProxyClient {
         info!("Sending HTTP request to '{}' through proxy service at '{}'", request.url(), self.endpoint);

         // Assert it has the appropriate fields
-        let url: &Url = request.url_mut();
+        let url: &Url = request.url();
         if url.domain().is_none() {
             panic!("URL {url} does not have a domain defined");
         }
@@ -204,7 +208,7 @@ impl ProxyClient {
             Some(port) => port,
             None => {
                 // Create the path
-                let port: u16 = create_path(&self.endpoint, &remote, &tls).await?;
+                let port: u16 = create_path(self.endpoint.clone(), &remote, &tls).await?;

                 // Store it in the internal map for next time
                 let mut lock: RwLockWriteGuard<HashMap<(String, Option<NewPathRequestTlsOptions>), u16>> = self.paths.write().unwrap();
@@ -276,7 +280,7 @@ impl ProxyClient {
             Some(port) => port,
             None => {
                 // Create the path
-                let port: u16 = create_path(&self.endpoint, &remote, &None).await?;
+                let port: u16 = create_path(self.endpoint.clone(), &remote, &None).await?;

                 // Store it in the internal map for next time
                 let mut lock: RwLockWriteGuard<HashMap<(String, Option<NewPathRequestTlsOptions>), u16>> = self.paths.write().unwrap();
@@ -346,7 +350,7 @@ impl ProxyClient {
             Some(port) => port,
             None => {
                 // Create the path
-                let port: u16 = create_path(&self.endpoint, &remote, &None).await?;
+                let port: u16 = create_path(self.endpoint.clone(), &remote, &None).await?;

                 // Store it in the internal map for next time
                 let mut lock: RwLockWriteGuard<HashMap<(String, Option<NewPathRequestTlsOptions>), u16>> = self.paths.write().unwrap();
diff --git a/brane-prx/src/manage.rs b/brane-prx/src/manage.rs
index d82c5398..10c59539 100644
--- a/brane-prx/src/manage.rs
+++ b/brane-prx/src/manage.rs
@@ -4,7 +4,7 @@
 // Created:
 //   23 Nov 2022, 11:07:05
 // Last edited:
-//   12 Jan 2024, 10:43:44
+//   14 Nov 2024, 15:00:35
 // Auto updated?
// Yes // @@ -181,7 +181,7 @@ pub async fn new_incoming_path(port: u16, address: Address, context: Arc{}->{}: Got new connection from '{}'", port, address, client_addr); // Now we establish a new connection to the internal host - let addr: String = format!("{}:{}", address.domain(), address.port()); + let addr: String = format!("{}:{}", address.host, address.port); debug!("Connecting to '{}'...", addr); let mut oconn: TcpStream = match TcpStream::connect(&addr).await { Ok(oconn) => oconn, diff --git a/brane-reg/Cargo.toml b/brane-reg/Cargo.toml index be8c82c3..99ac3d39 100644 --- a/brane-reg/Cargo.toml +++ b/brane-reg/Cargo.toml @@ -9,7 +9,6 @@ license.workspace = true [dependencies] clap = { version = "4.5.6", features = ["derive","env"] } -deliberation = { git = "https://github.com/braneframework/policy-reasoner" } dotenvy = "0.15.0" enum-debug.workspace = true env_logger = "0.11.0" @@ -28,9 +27,8 @@ warp = "0.3.2" # Workspace dependencies reqwest = { workspace = true } -brane-ast = { path = "../brane-ast" } +policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor" } brane-cfg = { path = "../brane-cfg" } -brane-exe = { path = "../brane-exe" } brane-shr = { path = "../brane-shr" } brane-tsk = { path = "../brane-tsk" } specifications = { path = "../specifications" } diff --git a/brane-reg/src/check.rs b/brane-reg/src/check.rs index 9be33628..448288a3 100644 --- a/brane-reg/src/check.rs +++ b/brane-reg/src/check.rs @@ -4,7 +4,7 @@ // Created: // 07 Feb 2024, 13:40:32 // Last edited: -// 07 Feb 2024, 14:42:13 +// 02 May 2025, 15:12:24 // Auto updated? // Yes // @@ -14,19 +14,19 @@ use std::sync::Arc; -use brane_ast::Workflow; -use brane_ast::ast::Edge; -use brane_ast::func_id::FunctionId; use brane_cfg::info::Info as _; use brane_cfg::node::{NodeConfig, NodeSpecificConfig, WorkerConfig}; -use brane_exe::pc::ProgramCounter; use brane_shr::formatters::BlockFormatter; use enum_debug::EnumDebug as _; use error_trace::trace; use log::{debug, error, info}; +use policy_reasoner::spec::reasons::ManyReason; use specifications::data::DataName; +use specifications::pc::ProgramCounter; use specifications::profiling::ProfileReport; use specifications::registering::{CheckTransferReply, CheckTransferRequest}; +use specifications::wir::func_id::FunctionId; +use specifications::wir::{Edge, Workflow}; use warp::hyper::StatusCode; use warp::reject::Rejection; use warp::reply::{self, Reply, Response}; @@ -157,12 +157,15 @@ async fn check_data_or_result(name: DataName, body: CheckTransferRequest, contex prep.stop(); // Attempt to parse the certificate to get the client's name (which tracks because it's already authenticated) - match report.time_fut("Checker", assert_asset_permission(&worker_config, &use_case, &workflow, &target, name.clone(), task)).await { + match report + .time_fut("Checker", assert_asset_permission(&worker_config, &use_case, &context.delib_token, &workflow, &target, name.clone(), task)) + .await + { Ok(None) => { info!("Checker authorized transfer of dataset '{}' to '{}'", name, target); // Serialize the response - let res: String = match serde_json::to_string(&CheckTransferReply { verdict: true, reasons: vec![] }) { + let res: String = match serde_json::to_string(&CheckTransferReply { verdict: true, reasons: ManyReason::new() }) { Ok(res) => res, Err(err) => { error!("{}", trace!(("Failed to serialize ChecKTransferReply"), err)); diff --git a/brane-reg/src/cli.rs b/brane-reg/src/cli.rs index 64d7fb75..12fa75cf 100644 --- a/brane-reg/src/cli.rs 
+++ b/brane-reg/src/cli.rs @@ -19,4 +19,8 @@ pub(crate) struct Cli { env = "NODE_CONFIG_PATH" )] pub(crate) node_config_path: PathBuf, + + /// The token to authenticate ourselves with the checker with. + #[clap(long, help = "A token to authenticate to the given Checker service with.", env = "CHECKER_DELIB_TOKEN")] + pub(crate) delib_token: String, } diff --git a/brane-reg/src/data.rs b/brane-reg/src/data.rs index 42c31f28..a2b414d9 100644 --- a/brane-reg/src/data.rs +++ b/brane-reg/src/data.rs @@ -4,7 +4,7 @@ // Created: // 26 Sep 2022, 15:40:40 // Last edited: -// 07 Feb 2024, 14:19:12 +// 02 May 2025, 15:15:02 // Auto updated? // Yes // @@ -16,29 +16,27 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::sync::Arc; -use std::time::Duration; -use brane_ast::Workflow; -use brane_ast::ast::Edge; -use brane_ast::func_id::FunctionId; use brane_cfg::certs::extract_client_name; use brane_cfg::info::Info as _; use brane_cfg::node::{NodeConfig, NodeSpecificConfig, WorkerConfig}; -use brane_exe::pc::ProgramCounter; use brane_shr::formatters::BlockFormatter; use brane_shr::fs::archive_async; use brane_tsk::errors::AuthorizeError; -use deliberation::spec::Verdict; use enum_debug::EnumDebug as _; use error_trace::{ErrorTrace as _, trace}; use log::{debug, error, info}; -use reqwest::header; +use policy_reasoner::spec::reasonerconn::ReasonerResponse; +use policy_reasoner::spec::reasons::ManyReason; use rustls::Certificate; use serde::{Deserialize, Serialize}; -use specifications::checking::DELIBERATION_API_TRANSFER_DATA; +use specifications::checking::deliberation as checking; use specifications::data::{AccessKind, AssetInfo, DataName}; +use specifications::pc::ProgramCounter; use specifications::profiling::ProfileReport; use specifications::registering::DownloadAssetRequest; +use specifications::wir::func_id::FunctionId; +use specifications::wir::{Edge, Workflow}; use tempfile::TempDir; use tokio::fs as tfs; use tokio::io::AsyncReadExt; @@ -60,10 +58,11 @@ use crate::store::Store; /// # Arguments /// - `worker_cfg`: The configuration for this node's environment. For us, contains if and where we should proxy the request through and where we may find the checker. /// - `use_case`: A string denoting which use-case (registry) we're using. +/// - `delib_token`: A token used to access the checker's deliberation API. /// - `workflow`: The workflow to check. /// - `client_name`: The name as which the client is authenticated. Will be matched with the indicated task. /// - `data_name`: The name of the dataset they are trying to access. -/// - `call`: A program counter that identifies for which call in the workflow we're doing this request (if any). +/// - `call`: A program counter that identifies for which call in the workflow we're doing this request. /// /// # Returns /// Whether permission is given or not. It is given as an [`Option`] that, when [`None`], means permission is given; else, it carries a list of reasons why not (if shared by the checker). 
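Because `Ok(None)` meaning "allowed" is easy to misread, here is a self-contained model of the calling convention described above (names here are illustrative, not the crate's real API):

```rust
// Toy stand-in for the result of `assert_asset_permission`:
// `Ok(None)` = permission granted, `Ok(Some(reasons))` = denied with reasons.
fn check_permission(allowed: bool) -> Result<Option<Vec<String>>, String> {
    if allowed { Ok(None) } else { Ok(Some(vec!["dataset is restricted".into()])) }
}

fn main() {
    match check_permission(false) {
        Ok(None) => println!("transfer authorized"),
        Ok(Some(reasons)) => eprintln!("transfer denied: {}", reasons.join("; ")),
        Err(err) => eprintln!("failed to consult checker: {err}"),
    }
}
```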
@@ -73,16 +72,17 @@ use crate::store::Store;
 pub async fn assert_asset_permission(
     worker_cfg: &WorkerConfig,
     use_case: &str,
+    delib_token: &str,
     workflow: &Workflow,
     client_name: &str,
     data_name: DataName,
     call: Option<ProgramCounter>,
-) -> Result<Option<Vec<String>>, AuthorizeError> {
+) -> Result<Option<ManyReason<String>>, AuthorizeError> {
     info!(
         "Checking data access of '{}'{} permission with checker '{}'...",
         data_name,
         if let Some(call) = call { format!(" (in the context of {})", call) } else { String::new() },
-        worker_cfg.services.chk.address
+        worker_cfg.services.chk.host
     );

     // Check if the authenticated name checks out
@@ -146,24 +146,15 @@ pub async fn assert_asset_permission(
     // Alrighty tighty, let's begin by building the request for the checker
     debug!("Constructing checker request...");
-    let body: AccessDataRequest =
-        AccessDataRequest { use_case: use_case.into(), workflow: workflow.clone(), data_id: data_name.name().into(), task_id: call };
-
-    // Next, generate a JWT to inject in the request
-    let jwt: String = specifications::policy::generate_policy_token(
-        if let Some(user) = &*workflow.user { user.as_str() } else { "UNKNOWN" },
-        &worker_cfg.name,
-        Duration::from_secs(60),
-        &worker_cfg.paths.policy_deliberation_secret,
-    )
-    .map_err(|source| AuthorizeError::TokenGenerate { secret: worker_cfg.paths.policy_deliberation_secret.clone(), source })?;
+    let body: checking::CheckTransferRequest =
+        checking::CheckTransferRequest { usecase: use_case.into(), workflow: workflow.clone(), task: call, input: data_name.name().into() };

     // Prepare the request to send
     let client: reqwest::Client = reqwest::Client::builder().build().map_err(|source| AuthorizeError::ClientBuild { source })?;
-    let addr: String = format!("{}/{}", worker_cfg.services.chk.address, DELIBERATION_API_TRANSFER_DATA.1);
+    let addr: String = format!("http://{}:{}{}", worker_cfg.services.chk.host, worker_cfg.services.chk.delib, checking::CHECK_TRANSFER_PATH.path);
     let req: reqwest::Request = client
-        .request(DELIBERATION_API_TRANSFER_DATA.0, &addr)
-        .header(header::AUTHORIZATION, format!("Bearer {jwt}"))
+        .request(checking::CHECK_TRANSFER_PATH.method, &addr)
+        .bearer_auth(delib_token)
         .json(&body)
         .build()
         .map_err(|source| AuthorizeError::ExecuteRequestBuild { addr: addr.clone(), source })?;
@@ -173,17 +164,17 @@ pub async fn assert_asset_permission(
     let res: reqwest::Response = client.execute(req).await.map_err(|source| AuthorizeError::ExecuteRequestSend { addr: addr.clone(), source })?;

     // Match on the status code to find if it's OK
-    debug!("Waiting for checker response...");
+    debug!("Checker response: {} ({})", res.status().as_u16(), res.status().canonical_reason().unwrap_or("???"));
     if !res.status().is_success() {
         return Err(AuthorizeError::ExecuteRequestFailure { addr, code: res.status(), err: res.text().await.ok() });
     }
     let res: String = res.text().await.map_err(|source| AuthorizeError::ExecuteBodyDownload { addr: addr.clone(), source })?;
-    let res: Verdict =
+    let res: checking::CheckResponse<ManyReason<String>> =
         serde_json::from_str(&res).map_err(|source| AuthorizeError::ExecuteBodyDeserialize { addr: addr.clone(), raw: res, source })?;

     // Now match the checker's response
-    match res {
-        Verdict::Allow(_) => {
+    match res.verdict {
+        ReasonerResponse::Success => {
             info!(
                 "Checker ALLOWED data access of '{}'{}",
                 data_name,
@@ -192,13 +183,13 @@ pub async fn assert_asset_permission(
             Ok(None)
         },
-        Verdict::Deny(verdict) => {
+        ReasonerResponse::Violated(reasons) => {
             info!(
                 "Checker DENIED data access of '{}'{}",
                 data_name,
                 if let Some(call) = call { format!(" (in the
context of {})", call) } else { String::new() }, ); - Ok(Some(verdict.reasons_for_denial.unwrap_or_else(Vec::new))) + Ok(Some(reasons)) }, } } @@ -366,7 +357,7 @@ pub async fn download_data( body: DownloadAssetRequest, context: Arc, ) -> Result { - let DownloadAssetRequest { use_case, workflow, task: _ } = body; + let DownloadAssetRequest { use_case, workflow, task } = body; info!("Handling GET on `/data/download/{}` (i.e., download dataset)...", name); // Parse if a valid workflow is given @@ -432,10 +423,11 @@ pub async fn download_data( match assert_asset_permission( &worker_config, &use_case, + &context.delib_token, &workflow, &client_name, DataName::Data(name.clone()), - body.task.map(|t| ProgramCounter::new(if let Some(id) = t.0 { FunctionId::Func(id as usize) } else { FunctionId::Main }, t.1 as usize)), + task.map(|t| ProgramCounter::new(if let Some(id) = t.0 { FunctionId::Func(id as usize) } else { FunctionId::Main }, t.1 as usize)), ) .await { @@ -550,7 +542,7 @@ pub async fn download_result( body: DownloadAssetRequest, context: Arc, ) -> Result { - let DownloadAssetRequest { use_case, workflow, task: _ } = body; + let DownloadAssetRequest { use_case, workflow, task } = body; info!("Handling GET on `/results/download/{}` (i.e., download intermediate result)...", name); // Parse if a valid workflow is given @@ -617,10 +609,11 @@ pub async fn download_result( match assert_asset_permission( &worker_config, &use_case, + &context.delib_token, &workflow, &client_name, DataName::IntermediateResult(name.clone()), - body.task.map(|t| ProgramCounter::new(if let Some(id) = t.0 { FunctionId::Func(id as usize) } else { FunctionId::Main }, t.1 as usize)), + task.map(|t| ProgramCounter::new(if let Some(id) = t.0 { FunctionId::Func(id as usize) } else { FunctionId::Main }, t.1 as usize)), ) .await .map_err(|source| { diff --git a/brane-reg/src/main.rs b/brane-reg/src/main.rs index fb4c15f8..152a5acf 100644 --- a/brane-reg/src/main.rs +++ b/brane-reg/src/main.rs @@ -4,7 +4,7 @@ // Created: // 26 Sep 2022, 15:11:44 // Last edited: -// 07 Feb 2024, 14:42:42 +// 02 May 2025, 13:33:02 // Auto updated? // Yes // @@ -64,7 +64,7 @@ async fn main() { // Put the path in a context - let context: Arc = Arc::new(Context { node_config_path: args.node_config_path }); + let context: Arc = Arc::new(Context { node_config_path: args.node_config_path, delib_token: args.delib_token }); let context = warp::any().map(move || context.clone()); diff --git a/brane-reg/src/spec.rs b/brane-reg/src/spec.rs index 22e26142..99d85b40 100644 --- a/brane-reg/src/spec.rs +++ b/brane-reg/src/spec.rs @@ -4,7 +4,7 @@ // Created: // 06 Nov 2022, 17:05:19 // Last edited: -// 06 Dec 2022, 11:19:22 +// 02 May 2025, 13:32:53 // Auto updated? // Yes // @@ -21,4 +21,6 @@ use std::path::PathBuf; pub struct Context { /// The path to the node config file. pub node_config_path: PathBuf, + /// The deliberation token used to commune with the checker. 
+    pub delib_token: String,
 }
diff --git a/brane-tsk/Cargo.toml b/brane-tsk/Cargo.toml
index ad4fc6ef..5322a311 100644
--- a/brane-tsk/Cargo.toml
+++ b/brane-tsk/Cargo.toml
@@ -36,7 +36,6 @@ uuid = { version = "1.7.0", features = ["v4"] }

 # Workspace dependencies
 reqwest = { workspace = true, features = ["json","stream","multipart"] }
-
 brane-ast = { path = "../brane-ast" }
 brane-cfg = { path = "../brane-cfg" }
 brane-exe = { path = "../brane-exe" }
diff --git a/brane-tsk/src/caches.rs b/brane-tsk/src/caches.rs
index edd40473..ecf3ec7f 100644
--- a/brane-tsk/src/caches.rs
+++ b/brane-tsk/src/caches.rs
@@ -4,7 +4,7 @@
 // Created:
 //   31 Jan 2024, 11:45:19
 // Last edited:
-//   31 Jan 2024, 14:24:26
+//   02 May 2025, 11:46:24
 // Auto updated?
 //   Yes
 //
@@ -18,13 +18,13 @@ use std::error::Error;
 use std::fmt::{Display, Formatter, Result as FResult};
 use std::time::{Duration, Instant};

-use brane_ast::locations::Location;
 use brane_shr::formatters::BlockFormatter;
 use log::debug;
 use num_traits::AsPrimitive;
 use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard};
 use reqwest::{Response, StatusCode};
 use specifications::address::Address;
+use specifications::wir::locations::Location;


 /***** CONSTANTS *****/
@@ -159,7 +159,7 @@ impl DomainRegistryCache {
         }

         // We didn't found a valid entry, so make a request for a new one
-        let url: String = format!("{}/infra/registries", self.api);
+        let url: String = format!("http://{}/infra/registries", self.api);
         debug!("Sending GET-request to '{url}'...");
         let res: Response = match reqwest::get(&url).await {
             Ok(res) => res,
diff --git a/brane-tsk/src/docker.rs b/brane-tsk/src/docker.rs
index d8c8a1de..9627f858 100644
--- a/brane-tsk/src/docker.rs
+++ b/brane-tsk/src/docker.rs
@@ -4,7 +4,7 @@
 // Created:
 //   19 Sep 2022, 14:57:17
 // Last edited:
-//   08 Feb 2024, 15:15:18
+//   02 May 2025, 11:26:36
 // Auto updated?
 //   Yes
 //
@@ -707,39 +707,105 @@ async fn remove_container(docker: &Docker, name: impl AsRef<str>) -> Result<(), Error> {
     docker.remove_container(name, remove_options).await.map_err(|source| Error::ContainerRemoveError { name: name.into(), source })
 }

+/// Given an Image, will attempt to search the Docker instance to find its digest.
+///
+/// This is different from [`get_digest()`] as that one searches files.
+///
+/// # Arguments
+/// - `docker`: An already connected local instance of Docker.
+/// - `image`: Some [`Image`] to find the digest of. Any digest already present is ignored.
+///
+/// # Returns
+/// The found digest, or [`None`] if we did not find the image in the instance.
+///
+/// # Errors
+/// This function fails if we failed to commune with the given Docker instance.
+async fn read_digest(docker: &Docker, image: impl AsRef<Image>) -> Result<Option<String>, Error> {
+    let image: &Image = image.as_ref();
+
+    // Attempt to list all images
+    for info in docker
+        .list_images(Some(bollard::image::ListImagesOptions::<String> { all: true, digests: true, ..Default::default() }))
+        .await
+        .map_err(|source| Error::ImageList { source })?
+    {
+        for tag in &info.repo_tags {
+            let tag = tag.split(':').collect::<Vec<&str>>();
+            if let Some(image_version) = &image.version {
+                if let [name, version] = tag.as_slice() {
+                    if *name == image.name && version == image_version {
+                        let id: &str = if info.id.starts_with("sha256:") { &info.id[7..] } else { &info.id };
+                        return Ok(Some(id.into()));
+                    }
+                }
+            } else {
+                if let [name] = tag.as_slice() {
+                    if *name == image.name {
+                        let id: &str = if info.id.starts_with("sha256:") { &info.id[7..] } else { &info.id };
+                        return Ok(Some(id.into()));
+                    }
+                }
+            }
+        }
+    }
+
+    // Didn't find it!
+    Ok(None)
+}
+
 /// Tries to import the image at the given path into the given Docker instance.
 ///
 /// # Arguments
 /// - `docker`: An already connected local instance of Docker.
 /// - `image`: The image to pull.
-/// - `source`: Path to the image to import.
+/// - `image_source`: Path to the image to import.
 ///
 /// # Returns
-/// Nothing on success, or an ExecutorError otherwise.
-async fn import_image(docker: &Docker, image: impl Into<Image>, source: impl AsRef<Path>) -> Result<(), Error> {
-    let image: Image = image.into();
-    let source: &Path = source.as_ref();
+/// The given `image`, possibly updated with a new ID if another was returned by Docker.
+///
+/// # Errors
+/// This function errors if we failed to import the given image file or tag it.
+async fn import_image(docker: &Docker, image: impl Into<Image>, image_source: impl AsRef<Path>) -> Result<Image, Error> {
+    let mut image: Image = image.into();
+    let image_source: &Path = image_source.as_ref();
     let options = ImportImageOptions { quiet: true };

     // Try to read the file
-    let file = TFile::open(source).await.map_err(|err_source| Error::ImageFileOpenError { path: PathBuf::from(source), source: err_source })?;
+    let file = TFile::open(image_source).await.map_err(|source| Error::ImageFileOpenError { path: PathBuf::from(image_source), source })?;

     // If successful, open the byte with a FramedReader, freezing all the chunk we read
     let byte_stream = FramedRead::new(file, BytesCodec::new()).map(|r| r.unwrap().freeze());
-    docker
-        .import_image_stream(options, byte_stream, None)
-        .try_collect::<Vec<BuildInfo>>()
-        .await
-        .map_err(|err_source| Error::ImageImportError { path: PathBuf::from(source), source: err_source })?;
+    image.digest = match docker.import_image_stream(options, byte_stream, None).try_collect::<Vec<BuildInfo>>().await {
+        Ok(res) => {
+            // See if Docker told us the ID directly
+            let mut id: Option<String> = None;
+            for info in &res {
+                if let Some(stream) = &info.stream {
+                    if stream.starts_with("Loaded image ID: sha256:") && stream.ends_with("\n") {
+                        id = Some(stream[24..stream.len() - 1].into());
+                        break;
+                    }
+                }
+            }
+
+            // If not, then we'll try to manually retrieve it
+            if id.is_none() {
+                id = read_digest(docker, &image).await?;
+            }
+
+            // Return!
+            Some(id.ok_or_else(|| Error::ImageImportFindId { path: image_source.into(), infos: res })?)
+        },
+        Err(source) => return Err(Error::ImageImportError { path: PathBuf::from(image_source), source }),
+    };

     // Tag it with the appropriate name & version
     let options = Some(TagImageOptions { repo: image.name.clone(), tag: image.version.clone().unwrap() });
-    docker.tag_image(image.digest.as_ref().unwrap(), options).await.map_err(|err_source| Error::ImageTagError {
-        image: Box::new(image),
-        image_source: source.to_string_lossy().to_string(),
-        source: err_source,
-    })
+    match docker.tag_image(image.digest.as_ref().unwrap(), options).await {
+        Ok(_) => Ok(image),
+        Err(source) => Err(Error::ImageTagError { image: Box::new(image), image_source: image_source.to_string_lossy().to_string(), source }),
+    }
 }

 /// Pulls a new image from the given Docker image ID / URL (?) and imports it in the Docker instance.
@@ -751,19 +817,32 @@ async fn import_image(docker: &Docker, image: impl Into<Image>, source: impl AsRef<Path>) -> Result<(), Error> {
 ///
 /// # Errors
 /// This function errors if we failed to pull the image, e.g., the Docker engine did not know where to find it, or there was no internet.
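Both `import_image` above and `pull_image` below recover the digest by scanning Docker's streamed progress messages for a known prefix. A self-contained sketch of that string convention (the two prefixes are exactly the ones the patch matches on):

```rust
/// Extracts the bare digest from Docker progress lines such as
/// "Loaded image ID: sha256:<hex>\n" (import) or "Digest: sha256:<hex>" (pull).
fn find_digest<'a>(lines: impl IntoIterator<Item = &'a str>) -> Option<String> {
    lines.into_iter().find_map(|l| {
        let l = l.trim_end();
        l.strip_prefix("Loaded image ID: sha256:")
            .or_else(|| l.strip_prefix("Digest: sha256:"))
            .map(String::from)
    })
}

fn main() {
    assert_eq!(find_digest(["Loaded image ID: sha256:abc123\n"]), Some("abc123".into()));
    assert_eq!(find_digest(["Status: Downloaded", "Digest: sha256:def456"]), Some("def456".into()));
    assert_eq!(find_digest(["no digest here"]), None);
}
```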
-async fn pull_image(docker: &Docker, image: impl Into<Image>, image_source: impl Into<String>) -> Result<(), Error> {
-    let image: Image = image.into();
+async fn pull_image(docker: &Docker, image: impl Into<Image>, image_source: impl Into<String>) -> Result<Image, Error> {
+    let mut image: Image = image.into();
     let image_source: String = image_source.into();

     // Define the options for this image
     let options = Some(CreateImageOptions { from_image: image_source.clone(), ..Default::default() });

     // Try to create it
-    docker
-        .create_image(options, None, None)
-        .try_collect::<Vec<CreateImageInfo>>()
-        .await
-        .map_err(|source| Error::ImagePullError { image_source: image_source.clone(), source })?;
+    image.digest = match docker.create_image(options, None, None).try_collect::<Vec<CreateImageInfo>>().await {
+        Ok(res) => {
+            let mut id: Option<String> = None;
+            for info in &res {
+                if let Some(status) = &info.status {
+                    if status.starts_with("Digest: sha256:") {
+                        id = Some(status[15..].into());
+                        break;
+                    }
+                }
+            }
+            match id {
+                Some(id) => Some(id),
+                None => return Err(Error::ImagePullFindId { image_source, infos: res }),
+            }
+        },
+        Err(source) => return Err(Error::ImagePullError { image_source, source }),
+    };

     // Set the options
     let options: Option<TagImageOptions<String>> = Some(if let Some(version) = &image.version {
@@ -773,11 +852,10 @@ async fn pull_image(docker: &Docker, image: impl Into<Image>, image_source: impl Into<String>) -> Result<(), Error> {
     });

     // Now tag it
-    docker.tag_image(&image_source, options).await.map_err(|source| Error::ImageTagError {
-        image: Box::new(image),
-        image_source: image_source.clone(),
-        source,
-    })
+    match docker.tag_image(&image_source, options).await {
+        Ok(_) => Ok(image),
+        Err(source) => Err(Error::ImageTagError { image: Box::new(image), image_source, source }),
+    }
 }


@@ -996,9 +1074,12 @@ pub async fn hash_container(container_path: impl AsRef<Path>) -> Result<String, Error> {
-pub async fn ensure_image(docker: &Docker, image: impl Into<Image>, source: impl Into<ImageSource>) -> Result<(), Error> {
+pub async fn ensure_image(docker: &Docker, image: impl Into<Image>, source: impl Into<ImageSource>) -> Result<Image, Error> {
     let image: Image = image.into();
     let source: ImageSource = source.into();

@@ -1006,7 +1087,7 @@ pub async fn ensure_image(docker: &Docker, image: impl Into<Image>, source: impl Into<ImageSource>) -> Result<(), Error> {
     match docker.inspect_image(&image.docker().to_string()).await {
         Ok(_) => {
             debug!("Image '{}' already exists in Docker deamon.", image.docker());
-            return Ok(());
+            return Ok(image);
         },
         Err(bollard::errors::Error::DockerResponseServerError { status_code: 404, message: _ }) => {
             debug!("Image '{}' doesn't exist in Docker daemon.", image.docker());
diff --git a/brane-tsk/src/errors.rs b/brane-tsk/src/errors.rs
index 194d23aa..879d883c 100644
--- a/brane-tsk/src/errors.rs
+++ b/brane-tsk/src/errors.rs
@@ -4,7 +4,7 @@
 // Created:
 //   24 Oct 2022, 15:27:26
 // Last edited:
-//   08 Feb 2024, 16:47:05
+//   02 May 2025, 11:43:11
 // Auto updated?
// Yes // @@ -18,10 +18,7 @@ use std::fmt::{Display, Formatter, Result as FResult, Write}; use std::path::PathBuf; use bollard::ClientVersion; -use brane_ast::Workflow; -use brane_ast::func_id::FunctionId; -use brane_ast::locations::{Location, Locations}; -use brane_exe::pc::ProgramCounter; +use bollard::secret::{BuildInfo, CreateImageInfo}; use brane_shr::formatters::{BlockFormatter, Capitalizeable}; use enum_debug::EnumDebug as _; use reqwest::StatusCode; @@ -31,7 +28,11 @@ use specifications::container::Image; use specifications::data::DataName; use specifications::driving::ExecuteReply; use specifications::package::Capability; +use specifications::pc::ProgramCounter; use specifications::version::Version; +use specifications::wir::Workflow; +use specifications::wir::func_id::FunctionId; +use specifications::wir::locations::{Location, Locations}; // The TaskReply is here for legacy reasons; bad name use specifications::working::{ExecuteReply as TaskReply, TaskStatus}; use tonic::Status; @@ -164,13 +165,13 @@ pub enum PreprocessError { ProxyError { source: Box }, /// Failed to connect to a delegate node with gRPC #[error("Failed to start gRPC connection with delegate node '{endpoint}'")] - GrpcConnectError { endpoint: Address, source: specifications::working::Error }, + GrpcConnectError { endpoint: String, source: specifications::working::Error }, /// Failed to send a preprocess request to a delegate node with gRPC #[error("Failed to send {what} request to delegate node '{endpoint}'")] - GrpcRequestError { what: &'static str, endpoint: Address, source: tonic::Status }, + GrpcRequestError { what: &'static str, endpoint: String, source: tonic::Status }, /// Failed to re-serialize the access kind. #[error("Failed to parse access kind '{raw}' sent by remote delegate '{endpoint}'")] - AccessKindParseError { endpoint: Address, raw: String, source: serde_json::Error }, + AccessKindParseError { endpoint: String, raw: String, source: serde_json::Error }, /// Failed to open/read a given file. #[error("Failed to read {} file '{}'", what, path.display())] @@ -314,13 +315,13 @@ pub enum ExecuteError { ProxyError { source: Box }, /// Failed to connect to a delegate node with gRPC #[error("Failed to start gRPC connection with delegate node '{endpoint}'")] - GrpcConnectError { endpoint: Address, source: specifications::working::Error }, + GrpcConnectError { endpoint: String, source: specifications::working::Error }, /// Failed to send a preprocess request to a delegate node with gRPC #[error("Failed to send {what} request to delegate node '{endpoint}'")] - GrpcRequestError { what: &'static str, endpoint: Address, source: tonic::Status }, + GrpcRequestError { what: &'static str, endpoint: String, source: tonic::Status }, /// Preprocessing failed with the following error. 
#[error("Remote delegate '{endpoint}' returned status '{status:?}' while executing task '{name}'")] - ExecuteError { endpoint: Address, name: String, status: TaskStatus, source: StringError }, + ExecuteError { endpoint: String, name: String, status: TaskStatus, source: StringError }, // Instance-only (worker side) /// Failed to load the digest cache file @@ -483,10 +484,10 @@ pub enum CommitError { ProxyError { source: Box }, /// Failed to connect to a delegate node with gRPC #[error("Failed to start gRPC connection with delegate node '{endpoint}'")] - GrpcConnectError { endpoint: Address, source: specifications::working::Error }, + GrpcConnectError { endpoint: String, source: specifications::working::Error }, /// Failed to send a preprocess request to a delegate node with gRPC #[error("Failed to send {what} request to delegate node '{endpoint}'")] - GrpcRequestError { what: &'static str, endpoint: Address, source: tonic::Status }, + GrpcRequestError { what: &'static str, endpoint: String, source: tonic::Status }, // Instance-only (worker side) /// Failed to read the AssetInfo file. @@ -558,6 +559,13 @@ pub enum DockerError { /// Failed to import the given image file. #[error("Failed to import image file '{}' into Docker engine", path.display())] ImageImportError { path: PathBuf, source: bollard::errors::Error }, + /// Failed to find the digest after importing the given image. + #[error("Failed to read digest after importing image '{}'\n\nReturned infos:\n{}\n{}\n{}\n", + path.display(), + "-".repeat(80), + infos.iter().map(|i| format!("{i:?}")).collect::>().join("\n"), + "-".repeat(80))] + ImageImportFindId { path: PathBuf, infos: Vec }, /// Failed to create the given image file. #[error("Failed to create image file '{}'", path.display())] ImageFileCreateError { path: PathBuf, source: std::io::Error }, @@ -574,6 +582,13 @@ pub enum DockerError { /// Failed to pull the given image file. #[error("Failed to pull image '{source}' into Docker engine")] ImagePullError { image_source: String, source: bollard::errors::Error }, + /// Failed to find the digest after pulling the given image. + #[error("Failed to read digest after pulling image '{}'\n\nReturned infos:\n{}\n{}\n{}\n", + image_source, + "-".repeat(80), + infos.iter().map(|i| format!("{i:?}")).collect::>().join("\n"), + "-".repeat(80))] + ImagePullFindId { image_source: String, infos: Vec }, /// Failed to appropriately tag the pulled image. #[error("Failed to tag pulled image '{source}' as '{image}'")] ImageTagError { image: Box, image_source: String, source: bollard::errors::Error }, @@ -585,6 +600,10 @@ pub enum DockerError { #[error("Failed to remove image '{}' (id: {}) from Docker engine", image.name(), id)] ImageRemoveError { image: Box, id: String, source: bollard::errors::Error }, + /// Failed to list the images in a docker instance. + #[error("Could not list images")] + ImageList { source: bollard::errors::Error }, + /// Could not open the given image.tar. #[error("Could not open given Docker image file '{}'", path.display())] ImageTarOpenError { path: PathBuf, source: std::io::Error }, diff --git a/brane-tsk/src/input.rs b/brane-tsk/src/input.rs index 6bd98eda..8f38cc6f 100644 --- a/brane-tsk/src/input.rs +++ b/brane-tsk/src/input.rs @@ -4,7 +4,7 @@ // Created: // 22 May 2023, 13:13:51 // Last edited: -// 08 Feb 2024, 15:16:20 +// 14 Nov 2024, 17:48:33 // Auto updated? 
// Yes // @@ -17,9 +17,6 @@ use std::error; use std::fmt::{Debug, Display, Formatter, Result as FResult}; use std::str::FromStr; -use brane_ast::DataType; -use brane_ast::ast::{ClassDef, VarDef}; -use brane_ast::spec::BuiltinClasses; use brane_exe::FullValue; use console::{Term, style}; use dialoguer::theme::ColorfulTheme; @@ -28,6 +25,9 @@ use log::debug; use specifications::data::DataIndex; use specifications::package::PackageInfo; use specifications::version::Version; +use specifications::wir::builtins::BuiltinClasses; +use specifications::wir::data_type::DataType; +use specifications::wir::{ClassDef, VarDef}; /***** ERRORS *****/ @@ -117,7 +117,7 @@ pub fn prompt_for_input(data_index: &DataIndex, package: &PackageInfo) -> Result package: None, version: None, - props: builtin.props().into_iter().map(|p| p.into()).collect(), + props: builtin.props().into_iter().map(|(name, dtype)| VarDef { name: (*name).into(), data_type: dtype.clone() }).collect(), // We don't care for methods anyway methods: vec![], }) { diff --git a/brane-tsk/src/spec.rs b/brane-tsk/src/spec.rs index 500f2eb9..c3aaec76 100644 --- a/brane-tsk/src/spec.rs +++ b/brane-tsk/src/spec.rs @@ -4,7 +4,7 @@ // Created: // 24 Oct 2022, 16:42:17 // Last edited: -// 12 Apr 2023, 12:57:54 +// 14 Nov 2024, 17:23:56 // Auto updated? // Yes // @@ -15,9 +15,9 @@ use std::fmt::{Display, Formatter, Result as FResult}; use std::str::FromStr; -use brane_ast::Workflow; use brane_exe::FullValue; use log::warn; +use specifications::wir::Workflow; use specifications::working::TaskStatus; use uuid::Uuid; diff --git a/docker-compose-central.yml b/docker-compose-central.yml index 20eba7f6..711af78f 100644 --- a/docker-compose-central.yml +++ b/docker-compose-central.yml @@ -1,5 +1,3 @@ -version: '3.6' - services: ############### ## AUXILLARY ## diff --git a/docker-compose-proxy.yml b/docker-compose-proxy.yml index c778b8da..e7c204e6 100644 --- a/docker-compose-proxy.yml +++ b/docker-compose-proxy.yml @@ -1,5 +1,3 @@ -version: '3.6' - services: ############### ## AUXILLARY ## diff --git a/docker-compose-worker.yml b/docker-compose-worker.yml index 85f9e73d..bc98957c 100644 --- a/docker-compose-worker.yml +++ b/docker-compose-worker.yml @@ -1,5 +1,3 @@ -version: '3.6' - services: ############### ## AUXILLARY ## @@ -31,17 +29,22 @@ services: image: brane-chk:${BRANE_VERSION:-latest} init: true container_name: ${CHK_NAME:-brane-chk} - command: -s "node-file-path=/node.yml" ports: - - "0.0.0.0:${CHK_PORT}:${CHK_PORT}" + - "0.0.0.0:${CHK_DELIB_PORT}:${CHK_DELIB_PORT}" + - "0.0.0.0:${CHK_STORE_PORT}:${CHK_STORE_PORT}" restart: always environment: - - ADDRESS=0.0.0.0:${CHK_PORT} + - NODE_CONFIG_PATH=/home/brane/node.yml + - POLICY_DB_PATH=/home/brane/policy.db + - POLICY_DELIB_KEYS_PATH=/home/brane/delib_keys.json + - POLICY_STORE_KEYS_PATH=/home/brane/store_keys.json + - DELIB_ADDRESS=0.0.0.0:${CHK_DELIB_PORT} + - STORE_ADDRESS=0.0.0.0:${CHK_STORE_PORT} volumes: - - ${NODE_CONFIG_PATH}:/node.yml - - ${POLICY_DB}:/data/policy.db - - ${POLICY_DELIBERATION_SECRET}:/examples/config/jwk_set_delib.json - - ${POLICY_EXPERT_SECRET}:/examples/config/jwk_set_expert.json + - ${NODE_CONFIG_PATH}:/home/brane/node.yml + - ${POLICY_DB}:/home/brane/policy.db + - ${POLICY_DELIB_KEYS}:/home/brane/delib_keys.json + - ${POLICY_STORE_KEYS}:/home/brane/store_keys.json extra_hosts: - host.docker.internal:host-gateway @@ -49,7 +52,7 @@ services: brane-reg: image: brane-reg:${BRANE_VERSION:-latest} container_name: ${REG_NAME:-brane-reg} - command: --debug + command: --debug 
--delib-token "${CHECKER_DELIB_TOKEN}" ports: - "0.0.0.0:${REG_PORT}:${REG_PORT}" restart: always @@ -57,7 +60,6 @@ services: - ${NODE_CONFIG_PATH}:/node.yml - ${BACKEND}:${BACKEND} - ${CERTS}:${CERTS} - - ${POLICY_DELIBERATION_SECRET}:${POLICY_DELIBERATION_SECRET} - ${DATA}:${DATA} - ${RESULTS}:${RESULTS} extra_hosts: @@ -67,7 +69,7 @@ services: brane-job: image: brane-job:${BRANE_VERSION:-latest} container_name: ${JOB_NAME:-brane-job} - command: --debug + command: --debug --delib-token "${CHECKER_DELIB_TOKEN}" ports: - "0.0.0.0:${JOB_PORT}:${JOB_PORT}" restart: always @@ -75,13 +77,12 @@ services: - ${NODE_CONFIG_PATH}:/node.yml - ${BACKEND}:${BACKEND} - ${CERTS}:${CERTS} - - ${POLICY_DELIBERATION_SECRET}:${POLICY_DELIBERATION_SECRET} - ${PACKAGES}:${PACKAGES} - ${DATA}:${DATA} - ${RESULTS}:${RESULTS} - ${TEMP_DATA}:${TEMP_DATA} - ${TEMP_RESULTS}:${TEMP_RESULTS} - - /var/run/docker.sock:/var/run/docker.sock + - /var/run/docker.sock:/var/run/docker.sock:uid=1000 extra_hosts: - host.docker.internal:host-gateway diff --git a/specifications/Cargo.toml b/specifications/Cargo.toml index 6d2dba2b..a0d18c3f 100644 --- a/specifications/Cargo.toml +++ b/specifications/Cargo.toml @@ -16,14 +16,17 @@ chrono = { version = "0.4.35", features = ["serde"] } const_format = "0.2.22" enum-debug.workspace = true futures = "0.3.24" -# lazy_static = "1.4.0" jsonwebtoken = "9.2.0" +lazy_static = "1.5.0" log = "0.4.22" +num-traits = "0.2.18" parking_lot = { version = "0.12.1", features = ["serde"] } prost = "0.13.2" +rand = "0.9.0" semver = "1.0.0" serde = { version = "1.0.204", features = ["derive", "rc"] } serde_json = "1.0.120" +serde_json_any_key = "2.0.0" serde_test = "1.0.0" serde_with = "3.0.0" serde_yaml = { version = "0.0.10", package = "serde_yml" } @@ -33,6 +36,9 @@ thiserror = "2.0.0" tonic = "0.12.0" uuid = { version = "1.7.0", features = ["serde", "v4"] } +policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor", features = ["eflint-haskell-reasoner"] } +policy-store = { git = "https://github.com/BraneFramework/policy-store", features = ["axum-server-spec"] } + # Workspace dependencies reqwest = { workspace = true, features = ["json", "stream"] } diff --git a/specifications/src/address.rs b/specifications/src/address.rs index dcf5f7f8..c45d7cd9 100644 --- a/specifications/src/address.rs +++ b/specifications/src/address.rs @@ -4,7 +4,7 @@ // Created: // 26 Jan 2023, 09:41:51 // Last edited: -// 12 Jan 2024, 11:51:07 +// 29 Apr 2025, 13:57:21 // Auto updated? // Yes // @@ -22,38 +22,375 @@ use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; use std::str::FromStr; use enum_debug::EnumDebug; -use log::trace; use serde::de::{self, Deserializer, Visitor}; use serde::ser::Serializer; use serde::{Deserialize, Serialize}; +use thiserror::Error; /***** ERRORS *****/ -/// Errors that relate to parsing Addresses. -#[derive(Debug, thiserror::Error)] -pub enum AddressError { - /// Invalid port number. - #[error("Illegal port number '{raw}'")] - IllegalPortNumber { raw: String, source: std::num::ParseIntError }, - /// Missing the colon separator (':') in the address. - #[error("Missing address/port separator ':' in '{raw}' (did you forget to define a port?)")] +/// Errors that relate to parsing [`Host`]s. +#[derive(Debug, Error)] +pub enum HostParseError { + /// No input was given. + #[error("No host given")] + NoInput, + /// The input contained an illegal character for a hostname. 
+ #[error("Found illegal character {c:?} in host {raw:?} (only a-z, A-Z, 0-9 and '-' are accepted)")] + IllegalChar { c: char, raw: String }, +} + +/// Errors that relate to parsing [`Address`]es. +#[derive(Debug, Error)] +pub enum AddressParseError { + /// Failed to correctly parse the hostname. + #[error("Failed to parse {raw:?} as a hostname")] + IllegalHost { raw: String, source: HostParseError }, + /// Failed to correctly parse the port. + #[error("Failed to parse {raw:?} as a port number")] + IllegalPort { raw: String, source: std::num::ParseIntError }, + /// There wasn't a colon in the input. + #[error("No colon found in input {raw:?}")] MissingColon { raw: String }, - /// Port not found when translating an [`AddressOpt`] into an [`Address`]. - #[error("Address '{addr}' does not have a port")] + /// A given [`AddressOpt`] was missing a port. + #[error("Address {addr} has no port defined")] MissingPort { addr: AddressOpt }, + /// No input was given. + #[error("No address given")] + NoInput, } + + + + /***** LIBRARY *****/ -/// Defines a more lenient alternative to a [`std::net::SocketAddr`] that also accepts hostnames. -#[derive(Clone, Debug, EnumDebug)] -pub enum Address { - /// It's an Ipv4 address. - Ipv4(Ipv4Addr, u16), - /// It's an Ipv6 address. - Ipv6(Ipv6Addr, u16), +/// Defines the possible types of hostnames. +/// +/// # Generics +/// - `'a`: The lifetime of the source text from which this host is parsed. It refers to it +/// internally through a [copy-on-write](Cow) pointer, so if it holds by ownership, this lifetime +/// can be `'static`. +#[derive(Clone, Debug, EnumDebug, Eq, Hash, PartialEq)] +pub enum Host { + /// It's an IPv4 address. + IPv4(Ipv4Addr), + /// It's an IPv6 address. + IPv6(Ipv6Addr), /// It's a hostname. - Hostname(String, u16), + Name(String), +} +// Constructors +impl Host { + /// Constructor for the Host that initializes it for the given IPv4 address. + /// + /// # Arguments + /// - `b1`: The first byte of the IP address. + /// - `b2`: The second byte of the IP address. + /// - `b3`: The third byte of the IP address. + /// - `b4`: The fourth byte of the IP address. + /// + /// # Returns + /// A new Host that is referred to by IPv4. + #[inline] + pub const fn new_ipv4(b1: u8, b2: u8, b3: u8, b4: u8) -> Self { Self::IPv4(Ipv4Addr::new(b1, b2, b3, b4)) } + + /// Constructor for the Host that initializes it for the given IPv6 address. + /// + /// # Arguments + /// - `b1`: The first pair of bytes of the IP address. + /// - `b2`: The second pair of bytes of the IP address. + /// - `b3`: The third pair of bytes of the IP address. + /// - `b4`: The fourth pair of bytes of the IP address. + /// - `b5`: The fifth pair of bytes of the IP address. + /// - `b6`: The sixth pair of bytes of the IP address. + /// - `b7`: The seventh pair of bytes of the IP address. + /// - `b8`: The eight pair of bytes of the IP address. + /// + /// # Returns + /// A new Host that is referred to by IPv6. + #[inline] + pub const fn new_ipv6(b1: u16, b2: u16, b3: u16, b4: u16, b5: u16, b6: u16, b7: u16, b8: u16) -> Self { + Self::IPv6(Ipv6Addr::new(b1, b2, b3, b4, b5, b6, b7, b8)) + } + + /// Constructor for the Host that initializes it for the given hostname. + /// + /// # Arguments + /// - `name`: The string name by which the host is known. + /// + /// # Returns + /// A new Host that is referred to by DNS name. 
+    #[inline]
+    pub fn new_name(name: impl Into<String>) -> Self { Self::Name(name.into()) }
+}
+// Accessors
+impl Host {
+    /// Checks whether this Host is an IP address ([IPv4](Host::IPv4) or [IPv6](Host::IPv6)).
+    ///
+    /// # Returns
+    /// True if it is, or false if it isn't.
+    #[inline]
+    pub const fn is_ip(&self) -> bool { matches!(self, Self::IPv4(_) | Self::IPv6(_)) }
+
+    /// Checks whether this Host is an [IPv4 address](Host::IPv4).
+    ///
+    /// # Returns
+    /// True if it is, or false if it isn't.
+    #[inline]
+    pub const fn is_ipv4(&self) -> bool { matches!(self, Self::IPv4(_)) }
+
+    /// Checks whether this Host is an [IPv6 address](Host::IPv6).
+    ///
+    /// # Returns
+    /// True if it is, or false if it isn't.
+    #[inline]
+    pub const fn is_ipv6(&self) -> bool { matches!(self, Self::IPv6(_)) }
+
+    /// Checks whether this Host is a [hostname](Host::Name).
+    ///
+    /// # Returns
+    /// True if it is, or false if it isn't.
+    #[inline]
+    pub const fn is_name(&self) -> bool { matches!(self, Self::Name(_)) }
+
+    /// Assumes self is an [IPv4 address](Host::IPv4) and provides read-only access to it.
+    ///
+    /// # Returns
+    /// A reference to the internal [`Ipv4Addr`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT an [IPv4 address](Host::IPv4).
+    #[inline]
+    #[track_caller]
+    pub fn ipv4(&self) -> &Ipv4Addr { if let Self::IPv4(addr) = self { addr } else { panic!("Cannot unwrap {:?} as a Host::IPv4", self.variant()) } }
+
+    /// Assumes self is an [IPv6 address](Host::IPv6) and provides read-only access to it.
+    ///
+    /// # Returns
+    /// A reference to the internal [`Ipv6Addr`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT an [IPv6 address](Host::IPv6).
+    #[inline]
+    #[track_caller]
+    pub fn ipv6(&self) -> &Ipv6Addr { if let Self::IPv6(addr) = self { addr } else { panic!("Cannot unwrap {:?} as a Host::IPv6", self.variant()) } }
+
+    /// Assumes self is a [hostname](Host::Name) and provides read-only access to it.
+    ///
+    /// # Returns
+    /// A reference to the internal [`str`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT a [hostname](Host::Name).
+    #[inline]
+    #[track_caller]
+    pub fn name(&self) -> &str {
+        if let Self::Name(name) = self { name.as_ref() } else { panic!("Cannot unwrap {:?} as a Host::Name", self.variant()) }
+    }
+
+    /// Assumes self is an [IPv4 address](Host::IPv4) and provides mutable access to it.
+    ///
+    /// # Returns
+    /// A mutable reference to the internal [`Ipv4Addr`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT an [IPv4 address](Host::IPv4).
+    #[inline]
+    #[track_caller]
+    pub fn ipv4_mut(&mut self) -> &mut Ipv4Addr {
+        if let Self::IPv4(addr) = self { addr } else { panic!("Cannot unwrap {:?} as a Host::IPv4", self.variant()) }
+    }
+
+    /// Assumes self is an [IPv6 address](Host::IPv6) and provides mutable access to it.
+    ///
+    /// # Returns
+    /// A mutable reference to the internal [`Ipv6Addr`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT an [IPv6 address](Host::IPv6).
+    #[inline]
+    #[track_caller]
+    pub fn ipv6_mut(&mut self) -> &mut Ipv6Addr {
+        if let Self::IPv6(addr) = self { addr } else { panic!("Cannot unwrap {:?} as a Host::IPv6", self.variant()) }
+    }
+
+    /// Assumes self is a [hostname](Host::Name) and provides mutable access to it.
+    ///
+    /// # Returns
+    /// A mutable reference to the internal [`String`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT a [hostname](Host::Name).
+    #[inline]
+    #[track_caller]
+    pub fn name_mut(&mut self) -> &mut String {
+        if let Self::Name(name) = self { name } else { panic!("Cannot unwrap {:?} as a Host::Name", self.variant()) }
+    }
+
+    /// Assumes self is an [IPv4 address](Host::IPv4) and returns the inner address.
+    ///
+    /// # Returns
+    /// The internal [`Ipv4Addr`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT an [IPv4 address](Host::IPv4).
+    #[inline]
+    #[track_caller]
+    pub fn into_ipv4(self) -> Ipv4Addr {
+        if let Self::IPv4(addr) = self { addr } else { panic!("Cannot unwrap {:?} as a Host::IPv4", self.variant()) }
+    }
+
+    /// Assumes self is an [IPv6 address](Host::IPv6) and returns the inner address.
+    ///
+    /// # Returns
+    /// The internal [`Ipv6Addr`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT an [IPv6 address](Host::IPv6).
+    #[inline]
+    #[track_caller]
+    pub fn into_ipv6(self) -> Ipv6Addr {
+        if let Self::IPv6(addr) = self { addr } else { panic!("Cannot unwrap {:?} as a Host::IPv6", self.variant()) }
+    }
+
+    /// Assumes self is a [hostname](Host::Name) and returns the inner name.
+    ///
+    /// # Returns
+    /// The internal [`String`].
+    ///
+    /// # Panics
+    /// This function panics if self is actually NOT a [hostname](Host::Name).
+    #[inline]
+    #[track_caller]
+    pub fn into_name(self) -> String {
+        if let Self::Name(name) = self { name } else { panic!("Cannot unwrap {:?} as a Host::Name", self.variant()) }
+    }
+}
+// Formatting
+impl Display for Host {
+    fn fmt(&self, f: &mut Formatter<'_>) -> FResult {
+        match self {
+            Self::IPv4(addr) => addr.fmt(f),
+            Self::IPv6(addr) => addr.fmt(f),
+            Self::Name(name) => name.fmt(f),
+        }
+    }
+}
+// De/Serialization
+impl<'de> Deserialize<'de> for Host {
+    #[inline]
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        /// Visitor for the `Host`.
+        pub struct HostVisitor;
+        impl<'de> Visitor<'de> for HostVisitor {
+            type Value = Host;
+
+            #[inline]
+            fn expecting(&self, f: &mut Formatter) -> FResult { write!(f, "an IP address or a hostname") }
+
+            #[inline]
+            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                match Host::from_str(v) {
+                    Ok(host) => Ok(host),
+                    Err(err) => Err(E::custom(err)),
+                }
+            }
+        }
+
+        // Call it
+        deserializer.deserialize_string(HostVisitor)
+    }
+}
+impl Serialize for Host {
+    #[inline]
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        serializer.serialize_str(&self.to_string())
+    }
+}
+impl FromStr for Host {
+    type Err = HostParseError;
+
+    #[inline]
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match IpAddr::from_str(s) {
+            Ok(IpAddr::V4(addr)) => Ok(Self::IPv4(addr)),
+            Ok(IpAddr::V6(addr)) => Ok(Self::IPv6(addr)),
+            Err(_) => {
+                // Assert there is *something*
+                if s.is_empty() {
+                    return Err(HostParseError::NoInput);
+                }
+
+                // Assert it's only good
+                for c in s.chars() {
+                    if (c < 'a' || c > 'z') && (c < 'A' || c > 'Z') && (c < '0' || c > '9') && c != '-' {
+                        return Err(HostParseError::IllegalChar { c, raw: s.into() });
+                    }
+                }
+
+                // OK, it's good
+                Ok(Self::Name(s.into()))
+            },
+        }
+    }
+}
+// Conversion
+impl From<IpAddr> for Host {
+    #[inline]
+    fn from(value: IpAddr) -> Self {
+        match value {
+            IpAddr::V4(addr) => Self::from(addr),
+            IpAddr::V6(addr) => Self::from(addr),
+        }
+    }
+}
+impl From<Ipv4Addr> for Host {
+    #[inline]
+    fn from(value: Ipv4Addr) -> Self { Self::IPv4(value) }
+}
+impl From<Ipv6Addr> for Host {
+    #[inline]
+    fn from(value: Ipv6Addr) -> Self { Self::IPv6(value) }
+}
+impl<'a> From<&'a str> for Host {
+    #[inline]
+    fn from(value: &'a str) -> Self { Self::Name(value.into()) }
+}
+impl From<String> for Host {
+    #[inline]
+    fn from(value: String) -> Self { Self::Name(value) }
+}
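+// Not part of the patch: a usage sketch of the new `Host` type, given the
+// character rule above (with the comparison logic corrected to `||`). Note
+// that '.' is not in the accepted set, so dotted names do not parse as
+// `Host::Name`; only IPs and single-label hostnames are accepted.
+//
+//     use std::str::FromStr as _;
+//
+//     assert!(matches!(Host::from_str("127.0.0.1"), Ok(Host::IPv4(_))));
+//     assert!(matches!(Host::from_str("some-host"), Ok(Host::Name(_))));
+//     assert!(matches!(Host::from_str("my_host"), Err(HostParseError::IllegalChar { c: '_', .. })));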
+
+
+/// Defines a more lenient alternative to a [`SocketAddr`](std::net::SocketAddr) that also accepts
+/// hostnames.
+#[derive(Clone, Debug)]
+pub struct Address {
+    /// The host-part of the address.
+    pub host: Host,
+    /// The port-part of the address.
+    pub port: u16,
+}
+// Constructors
 impl Address {
     /// Constructor for the Address that initializes it for the given IPv4 address.
     ///
@@ -67,7 +404,7 @@ impl Address {
     /// # Returns
     /// A new Address instance.
     #[inline]
-    pub fn ipv4(b1: u8, b2: u8, b3: u8, b4: u8, port: u16) -> Self { Self::Ipv4(Ipv4Addr::new(b1, b2, b3, b4), port) }
+    pub fn ipv4(b1: u8, b2: u8, b3: u8, b4: u8, port: u16) -> Self { Self { host: Host::new_ipv4(b1, b2, b3, b4), port } }

     /// Constructor for the Address that initializes it for the given IPv4 address.
     ///
@@ -78,7 +415,7 @@ impl Address {
     /// # Returns
     /// A new Address instance.
     #[inline]
-    pub fn from_ipv4(ipv4: impl Into<Ipv4Addr>, port: u16) -> Self { Self::Ipv4(ipv4.into(), port) }
+    pub fn from_ipv4(ipv4: impl Into<Ipv4Addr>, port: u16) -> Self { Self { host: Host::IPv4(ipv4.into()), port } }

     /// Constructor for the Address that initializes it for the given IPv6 address.
     ///
@@ -98,7 +435,7 @@ impl Address {
     #[allow(clippy::too_many_arguments)]
     #[inline]
     pub fn ipv6(b1: u16, b2: u16, b3: u16, b4: u16, b5: u16, b6: u16, b7: u16, b8: u16, port: u16) -> Self {
-        Self::Ipv6(Ipv6Addr::new(b1, b2, b3, b4, b5, b6, b7, b8), port)
+        Self { host: Host::new_ipv6(b1, b2, b3, b4, b5, b6, b7, b8), port }
     }

     /// Constructor for the Address that initializes it for the given IPv6 address.
@@ -110,7 +447,7 @@ impl Address {
     /// # Returns
     /// A new Address instance.
     #[inline]
-    pub fn from_ipv6(ipv6: impl Into<Ipv6Addr>, port: u16) -> Self { Self::Ipv6(ipv6.into(), port) }
+    pub fn from_ipv6(ipv6: impl Into<Ipv6Addr>, port: u16) -> Self { Self { host: Host::IPv6(ipv6.into()), port } }

     /// Constructor for the Address that initializes it for the given hostname.
     ///
@@ -121,47 +458,20 @@ impl Address {
     /// # Returns
     /// A new Address instance.
     #[inline]
-    pub fn hostname(hostname: impl Into<String>, port: u16) -> Self { Self::Hostname(hostname.into(), port) }
-
+    pub fn hostname(hostname: impl Into<String>, port: u16) -> Self { Self { host: Host::new_name(hostname), port } }
+}
+// Accessors
+impl Address {
     /// Returns the domain-part, as a (serialized) string version.
     ///
     /// # Returns
     /// A `Cow` that either contains a reference to the already String hostname, or else a newly created string that is the serialized version of an IP.
     #[inline]
-    pub fn domain(&self) -> Cow<'_, str> {
-        use Address::*;
-        match self {
-            Ipv4(addr, _) => format!("{addr}").into(),
-            Ipv6(addr, _) => format!("{addr}").into(),
-            Hostname(addr, _) => addr.into(),
-        }
-    }
-
-    /// Returns the port-part, as a number.
-    ///
-    /// # Returns
-    /// A `u16` that is the port.
-    #[inline]
-    pub fn port(&self) -> u16 {
-        use Address::*;
-        match self {
-            Ipv4(_, port) => *port,
-            Ipv6(_, port) => *port,
-            Hostname(_, port) => *port,
-        }
-    }
-
-    /// Returns the port-part as a mutable number.
-    ///
-    /// # Returns
-    /// A mutable reference to the `u16` that is the port.
-    #[inline]
-    pub fn port_mut(&mut self) -> &mut u16 {
-        use Address::*;
-        match self {
-            Ipv4(_, port) => port,
-            Ipv6(_, port) => port,
-            Hostname(_, port) => port,
+    pub fn domain(&self) -> Cow<'_, str> {
+        match &self.host {
+            Host::IPv4(addr) => Cow::Owned(addr.to_string()),
+            Host::IPv6(addr) => Cow::Owned(addr.to_string()),
+            Host::Name(name) => Cow::Borrowed(name),
         }
     }

@@ -170,43 +480,40 @@ impl Address {
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_hostname(&self) -> bool { matches!(self, Self::Hostname(_, _)) }
+    pub const fn is_hostname(&self) -> bool { self.host.is_name() }

     /// Returns if this Address is an `Address::Ipv4` or `Address::Ipv6`.
     ///
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_ip(&self) -> bool { self.is_ipv4() || self.is_ipv6() }
+    pub const fn is_ip(&self) -> bool { self.host.is_ip() }

     /// Returns if this Address is an `Address::Ipv4`.
     ///
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_ipv4(&self) -> bool { matches!(self, Self::Ipv4(_, _)) }
+    pub const fn is_ipv4(&self) -> bool { self.host.is_ipv4() }

     /// Returns if this Address is an `Address::Ipv6`.
     ///
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_ipv6(&self) -> bool { matches!(self, Self::Ipv6(_, _)) }
-
+    pub const fn is_ipv6(&self) -> bool { self.host.is_ipv6() }
+}
+// Formatting
+impl Address {
     /// Returns a formatter that deterministically and parseably serializes the Address.
     #[inline]
-    pub fn serialize(&self) -> impl '_ + Display { self }
+    pub const fn serialize(&self) -> impl '_ + Display { self }
 }
 impl Display for Address {
-    fn fmt(&self, f: &mut Formatter<'_>) -> FResult {
-        use Address::*;
-        match self {
-            Ipv4(addr, port) => write!(f, "{addr}:{port}"),
-            Ipv6(addr, port) => write!(f, "{addr}:{port}"),
-            Hostname(addr, port) => write!(f, "{addr}:{port}"),
-        }
-    }
+    #[inline]
+    fn fmt(&self, f: &mut Formatter<'_>) -> FResult { write!(f, "{}:{}", self.host, self.port) }
 }
+// De/Serialization
 impl Serialize for Address {
     #[inline]
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@@ -224,7 +531,7 @@ impl<'de> Deserialize<'de> for Address {
     {
         /// Defines the visitor for the Address
         struct AddressVisitor;
-        impl Visitor<'_> for AddressVisitor {
+        impl<'de> Visitor<'de> for AddressVisitor {
             type Value = Address;

             #[inline]
@@ -235,11 +542,7 @@ impl<'de> Deserialize<'de> for Address {
             where
                 E: de::Error,
             {
-                // Attempt to serialize the incoming string
-                match Address::from_str(v) {
-                    Ok(address) => Ok(address),
-                    Err(err) => Err(E::custom(err)),
-                }
+                Address::from_str(v).map_err(E::custom)
             }
         }

@@ -248,36 +551,30 @@ impl<'de> Deserialize<'de> for Address {
     }
 }
 impl FromStr for Address {
-    type Err = AddressError;
+    type Err = AddressParseError;

     fn from_str(s: &str) -> Result<Self, Self::Err> {
-        // Attempt to find the colon first
-        let colon_pos: usize = match s.rfind(':') {
-            Some(pos) => pos,
-            None => {
-                return Err(AddressError::MissingColon { raw: s.into() });
-            },
-        };
+        // Assert there is *something*
+        if s.is_empty() {
+            return Err(AddressParseError::NoInput);
+        }

-        // Split it on that
-        let (address, port): (&str, &str) = (&s[..colon_pos], &s[colon_pos + 1..]);
+        // Check the split
+        let (host, port): (&str, &str) = if let Some(pos) = s.find(':') {
+            (&s[..pos], &s[pos + 1..])
+        } else {
+            return Err(AddressParseError::MissingColon { raw: s.into() });
+        };

-        // Parse the port
-        let port: u16 = u16::from_str(port).map_err(|source| AddressError::IllegalPortNumber { raw: port.into(), source })?;
+        // Parse the host
+        let host: Host = Host::from_str(host).map_err(|source| AddressParseError::IllegalHost { raw: host.into(), source })?;
+        let port: u16 = u16::from_str(port).map_err(|source| AddressParseError::IllegalPort { raw: port.into(), source })?;

-        // Resolve the address to a new instance of ourselves
-        match IpAddr::from_str(address) {
-            Ok(address) => match address {
-                IpAddr::V4(ip) => Ok(Self::Ipv4(ip, port)),
-                IpAddr::V6(ip) => Ok(Self::Ipv6(ip, port)),
-            },
-            Err(source) => {
-                trace!("Parsing '{}' as a hostname, but might be an invalid IP address (parser feedback: {})", address, source);
-                Ok(Self::Hostname(address.into(), port))
-            },
-        }
+        // OK
+        Ok(Self { host, port })
     }
 }
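+// Not part of the patch: an illustrative round-trip through the new struct
+// shape, where `Address` is a `Host` plus a mandatory port and
+// `Display`/`FromStr` agree on the "host:port" form.
+//
+//     let addr = Address::from_str("localhost:50051").unwrap();
+//     assert!(addr.is_hostname());
+//     assert_eq!(addr.port, 50051);
+//     assert_eq!(addr.to_string(), "localhost:50051");
+//     assert!(matches!(Address::from_str("localhost"), Err(AddressParseError::MissingColon { .. })));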
+// Conversion
 impl AsRef<Address> for Address {
     #[inline]
     fn as_ref(&self) -> &Self { self }
@@ -291,52 +588,28 @@ impl From<&mut Address> for Address {
     fn from(value: &mut Address) -> Self { value.clone() }
 }
 impl TryFrom<AddressOpt> for Address {
-    type Error = AddressError;
+    type Error = AddressParseError;

     #[inline]
     fn try_from(value: AddressOpt) -> Result<Self, Self::Error> {
-        match value {
-            AddressOpt::Ipv4(host, port_opt) => {
-                if let Some(port) = port_opt {
-                    Ok(Self::Ipv4(host, port))
-                } else {
-                    Err(AddressError::MissingPort { addr: AddressOpt::Ipv4(host, None) })
-                }
-            },
-
-            AddressOpt::Ipv6(host, port_opt) => {
-                if let Some(port) = port_opt {
-                    Ok(Self::Ipv6(host, port))
-                } else {
-                    Err(AddressError::MissingPort { addr: AddressOpt::Ipv6(host, None) })
-                }
-            },
-
-            AddressOpt::Hostname(host, port_opt) => {
-                if let Some(port) = port_opt {
-                    Ok(Self::Hostname(host, port))
-                } else {
-                    Err(AddressError::MissingPort { addr: AddressOpt::Hostname(host, None) })
-                }
-            },
+        match value.port {
+            Some(port) => Ok(Self { host: value.host, port }),
+            None => Err(AddressParseError::MissingPort { addr: value }),
         }
     }
 }
-
-
 /// Alternative to an [`Address`] that has an optional port part.
-#[derive(Clone, Debug, EnumDebug)]
-pub enum AddressOpt {
-    /// It's an Ipv4 address.
-    Ipv4(Ipv4Addr, Option<u16>),
-    /// It's an Ipv6 address.
-    Ipv6(Ipv6Addr, Option<u16>),
-    /// It's a hostname.
-    Hostname(String, Option<u16>),
+#[derive(Clone, Debug)]
+pub struct AddressOpt {
+    /// The host-part of the address.
+    pub host: Host,
+    /// The port-part of the address.
+    pub port: Option<u16>,
 }
+// Constructors
 impl AddressOpt {
-    /// Constructor for the `AddressOpt` that initializes it for the given IPv4 address.
+    /// Constructor for the AddressOpt that initializes it for the given IPv4 address.
     ///
     /// # Arguments
     /// - `b1`: The first byte of the IPv4 address.
@@ -346,22 +619,22 @@ impl AddressOpt {
     /// - `port`: The port for this address, if any.
     ///
     /// # Returns
-    /// A new `AddressOpt` instance.
+    /// A new AddressOpt instance.
     #[inline]
-    pub fn ipv4(b1: u8, b2: u8, b3: u8, b4: u8, port: Option<u16>) -> Self { Self::Ipv4(Ipv4Addr::new(b1, b2, b3, b4), port) }
+    pub fn ipv4(b1: u8, b2: u8, b3: u8, b4: u8, port: Option<u16>) -> Self { Self { host: Host::new_ipv4(b1, b2, b3, b4), port } }

-    /// Constructor for the `AddressOpt` that initializes it for the given IPv4 address.
+    /// Constructor for the AddressOpt that initializes it for the given IPv4 address.
     ///
     /// # Arguments
     /// - `ipv4`: The already constructed IPv4 address to use.
     /// - `port`: The port for this address, if any.
     ///
     /// # Returns
-    /// A new `AddressOpt` instance.
+    /// A new AddressOpt instance.
     #[inline]
-    pub fn from_ipv4(ipv4: impl Into<Ipv4Addr>, port: Option<u16>) -> Self { Self::Ipv4(ipv4.into(), port) }
+    pub fn from_ipv4(ipv4: impl Into<Ipv4Addr>, port: Option<u16>) -> Self { Self { host: Host::IPv4(ipv4.into()), port } }

-    /// Constructor for the `AddressOpt` that initializes it for the given IPv6 address.
+    /// Constructor for the AddressOpt that initializes it for the given IPv6 address.
     ///
     /// # Arguments
     /// - `b1`: The first pair of bytes of the IPv6 address.
@@ -375,137 +648,90 @@ impl AddressOpt {
     /// - `port`: The port for this address, if any.
     ///
     /// # Returns
-    /// A new `AddressOpt` instance.
+    /// A new AddressOpt instance.
     #[allow(clippy::too_many_arguments)]
     #[inline]
     pub fn ipv6(b1: u16, b2: u16, b3: u16, b4: u16, b5: u16, b6: u16, b7: u16, b8: u16, port: Option<u16>) -> Self {
-        Self::Ipv6(Ipv6Addr::new(b1, b2, b3, b4, b5, b6, b7, b8), port)
+        Self { host: Host::new_ipv6(b1, b2, b3, b4, b5, b6, b7, b8), port }
     }

-    /// Constructor for the `AddressOpt` that initializes it for the given IPv6 address.
+    /// Constructor for the AddressOpt that initializes it for the given IPv6 address.
     ///
     /// # Arguments
     /// - `ipv6`: The already constructed IPv6 address to use.
     /// - `port`: The port for this address, if any.
     ///
     /// # Returns
-    /// A new `AddressOpt` instance.
+    /// A new AddressOpt instance.
     #[inline]
-    pub fn from_ipv6(ipv6: impl Into<Ipv6Addr>, port: Option<u16>) -> Self { Self::Ipv6(ipv6.into(), port) }
+    pub fn from_ipv6(ipv6: impl Into<Ipv6Addr>, port: Option<u16>) -> Self { Self { host: Host::IPv6(ipv6.into()), port } }

-    /// Constructor for the `AddressOpt` that initializes it for the given hostname.
+    /// Constructor for the AddressOpt that initializes it for the given hostname.
     ///
     /// # Arguments
-    /// - `hostname`: The hostname for this `AddressOpt`.
-    /// - `port`: The port for this address, if any.
+    /// - `hostname`: The hostname for this AddressOpt.
+    /// - `port`: The port for this address, if any.
     ///
     /// # Returns
-    /// A new `AddressOpt` instance.
+    /// A new AddressOpt instance.
     #[inline]
-    pub fn hostname(hostname: impl Into<String>, port: Option<u16>) -> Self { Self::Hostname(hostname.into(), port) }
-
+    pub fn hostname(hostname: impl Into<String>, port: Option<u16>) -> Self { Self { host: Host::new_name(hostname), port } }
+}
+// Accessors
+impl AddressOpt {
     /// Returns the domain-part, as a (serialized) string version.
     ///
     /// # Returns
-    /// A `Cow` that either contains a reference to the already `String` hostname, or else a newly created string that is the serialized version of an IP.
+    /// A `Cow` that either contains a reference to the already String hostname, or else a newly created string that is the serialized version of an IP.
     #[inline]
     pub fn domain(&self) -> Cow<'_, str> {
-        use AddressOpt::*;
-        match self {
-            Ipv4(addr, _) => format!("{addr}").into(),
-            Ipv6(addr, _) => format!("{addr}").into(),
-            Hostname(addr, _) => addr.into(),
+        match &self.host {
+            Host::IPv4(addr) => Cow::Owned(addr.to_string()),
+            Host::IPv6(addr) => Cow::Owned(addr.to_string()),
+            Host::Name(name) => Cow::Borrowed(name),
         }
     }

-    /// Returns the port-part, as a number.
-    ///
-    /// # Returns
-    /// A `u16` that is the port.
-    #[inline]
-    pub fn port(&self) -> Option<u16> {
-        use AddressOpt::*;
-        match self {
-            Ipv4(_, port) => *port,
-            Ipv6(_, port) => *port,
-            Hostname(_, port) => *port,
-        }
-    }
-
-    /// Returns the port-part as a mutable number.
-    ///
-    /// # Returns
-    /// A mutable reference to the `u16` that is the port.
-    #[inline]
-    pub fn port_mut(&mut self) -> &mut Option<u16> {
-        use AddressOpt::*;
-        match self {
-            Ipv4(_, port) => port,
-            Ipv6(_, port) => port,
-            Hostname(_, port) => port,
-        }
-    }
-
-    /// Returns if this `AddressOpt` is an `AddressOpt::Hostname`.
+    /// Returns if this AddressOpt is an `AddressOpt::Hostname`.
     ///
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_hostname(&self) -> bool { matches!(self, Self::Hostname(_, _)) }
+    pub const fn is_hostname(&self) -> bool { self.host.is_name() }

-    /// Returns if this `AddressOpt` is an `AddressOpt::Ipv4` or `AddressOpt::Ipv6`.
+    /// Returns if this AddressOpt is an `AddressOpt::Ipv4` or `AddressOpt::Ipv6`.
     ///
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_ip(&self) -> bool { self.is_ipv4() || self.is_ipv6() }
+    pub const fn is_ip(&self) -> bool { self.host.is_ip() }

-    /// Returns if this `AddressOpt` is an `AddressOpt::Ipv4`.
+    /// Returns if this AddressOpt is an `AddressOpt::Ipv4`.
     ///
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_ipv4(&self) -> bool { matches!(self, Self::Ipv4(_, _)) }
+    pub const fn is_ipv4(&self) -> bool { self.host.is_ipv4() }

-    /// Returns if this `AddressOpt` is an `AddressOpt::Ipv6`.
+    /// Returns if this AddressOpt is an `AddressOpt::Ipv6`.
     ///
     /// # Returns
     /// True if it is, false if it isn't.
     #[inline]
-    pub fn is_ipv6(&self) -> bool { matches!(self, Self::Ipv6(_, _)) }
-
-    /// Returns a formatter that deterministically and parseably serializes the `AddressOpt`.
+    pub const fn is_ipv6(&self) -> bool { self.host.is_ipv6() }
+}
+// Formatting
+impl AddressOpt {
+    /// Returns a formatter that deterministically and parseably serializes the AddressOpt.
     #[inline]
     pub fn serialize(&self) -> impl '_ + Display { self }
 }
 impl Display for AddressOpt {
     fn fmt(&self, f: &mut Formatter<'_>) -> FResult {
-        use AddressOpt::*;
-        match self {
-            Ipv4(addr, port) => {
-                write!(f, "{addr}")?;
-                if let Some(port) = port {
-                    write!(f, ":{port}")?;
-                };
-                Ok(())
-            },
-            Ipv6(addr, port) => {
-                write!(f, "{addr}")?;
-                if let Some(port) = port {
-                    write!(f, ":{port}")?;
-                };
-                Ok(())
-            },
-            Hostname(addr, port) => {
-                write!(f, "{addr}")?;
-                if let Some(port) = port {
-                    write!(f, ":{port}")?;
-                };
-                Ok(())
-            },
-        }
+        if let Some(port) = self.port { write!(f, "{}:{}", self.host, port) } else { write!(f, "{}", self.host) }
     }
 }
+// De/Serialization
 impl Serialize for AddressOpt {
     #[inline]
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@@ -521,9 +747,9 @@ impl<'de> Deserialize<'de> for AddressOpt {
     where
         D: Deserializer<'de>,
     {
-        /// Defines the visitor for the `AddressOpt`
+        /// Defines the visitor for the AddressOpt
         struct AddressOptVisitor;
-        impl Visitor<'_> for AddressOptVisitor {
+        impl<'de> Visitor<'de> for AddressOptVisitor {
             type Value = AddressOpt;

             #[inline]
@@ -535,10 +761,7 @@ impl<'de> Deserialize<'de> for AddressOpt {
                 E: de::Error,
             {
                 // Attempt to serialize the incoming string
-                match AddressOpt::from_str(v) {
-                    Ok(address) => Ok(address),
-                    Err(err) => Err(E::custom(err)),
-                }
+                AddressOpt::from_str(v).map_err(E::custom)
             }
         }

@@ -547,32 +770,30 @@ impl<'de> Deserialize<'de> for AddressOpt {
     }
 }
 impl FromStr for AddressOpt {
-    type Err = AddressError;
+    type Err = AddressParseError;

     fn from_str(s: &str) -> Result<Self, Self::Err> {
-        // Attempt to find the colon first and split the string accordingly
-        let (address, port): (&str, Option<&str>) = match s.rfind(':') {
-            Some(pos) => (&s[..pos], Some(&s[pos + 1..])),
-            None => (s, None),
-        };
+        // Assert there is *something*
+        if s.is_empty() {
+            return Err(AddressParseError::NoInput);
+        }

-        // Parse the port, if any
-        let port: Option<u16> =
-            port.map(|p| u16::from_str(p).map_err(|source| AddressError::IllegalPortNumber { raw: p.into(), source })).transpose()?;
+        // Check the split
+        let (host, port): (&str, Option<&str>) = if let Some(pos) = s.find(':') { (&s[..pos], Some(&s[pos + 1..])) } else { (s, None) };

-        // Resolve the address to a new instance of ourselves
-        match IpAddr::from_str(address) {
-            Ok(address) => match address {
-                IpAddr::V4(ip) => Ok(Self::Ipv4(ip, port)),
-                IpAddr::V6(ip) => Ok(Self::Ipv6(ip, port)),
-            },
-            Err(err) => {
-                trace!("Parsing '{}' as a hostname, but might be an invalid IP address (parser feedback: {})", address, err);
-                Ok(Self::Hostname(address.into(), port))
-            },
-        }
+        // Parse the host
+        let host: Host = Host::from_str(host).map_err(|source| AddressParseError::IllegalHost { raw: host.into(), source })?;
+        let port: Option<u16> = if let Some(port) = port {
+            Some(u16::from_str(port).map_err(|source| AddressParseError::IllegalPort { raw: port.into(), source })?)
+        } else {
+            None
+        };
+
+        // OK
+        Ok(Self { host, port })
     }
 }
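+// Not part of the patch: a sketch of the optional-port behaviour and the
+// `TryFrom<AddressOpt>` upgrade path defined earlier in this file.
+//
+//     let opt = AddressOpt::from_str("some-host").unwrap();
+//     assert_eq!(opt.port, None);
+//     assert!(Address::try_from(opt).is_err());
+//
+//     let opt = AddressOpt::from_str("some-host:8080").unwrap();
+//     assert_eq!(Address::try_from(opt).unwrap().port, 8080);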
+// Conversion
 impl AsRef<AddressOpt> for AddressOpt {
     #[inline]
     fn as_ref(&self) -> &Self { self }
@@ -587,11 +808,5 @@ impl From<&mut AddressOpt> for AddressOpt {
 }
 impl From<Address> for AddressOpt {
     #[inline]
-    fn from(value: Address) -> Self {
-        match value {
-            Address::Ipv4(host, port) => Self::Ipv4(host, Some(port)),
-            Address::Ipv6(host, port) => Self::Ipv6(host, Some(port)),
-            Address::Hostname(host, port) => Self::Hostname(host, Some(port)),
-        }
-    }
+    fn from(value: Address) -> Self { Self { host: value.host, port: Some(value.port) } }
 }
diff --git a/specifications/src/checking.rs b/specifications/src/checking.rs
index 91ef12ea..8f460e1e 100644
--- a/specifications/src/checking.rs
+++ b/specifications/src/checking.rs
@@ -4,7 +4,7 @@
 // Created:
 //   07 Feb 2024, 11:54:14
 // Last edited:
-//   06 Mar 2024, 14:03:32
+//   02 May 2025, 15:01:38
 // Auto updated?
 //   Yes
 //
@@ -13,24 +13,571 @@
 //!   with the `policy-reasoner`.
 //

-use reqwest::Method;
-
-
-/***** CONSTANTS *****/
-/// Defines the API path to fetch the checker's current list of policies.
-pub const POLICY_API_LIST_POLICIES: (Method, &str) = (Method::GET, "v1/management/policies");
-/// Defines the API path to fetch the currently active version on the checker.
-pub const POLICY_API_GET_ACTIVE_VERSION: (Method, &str) = (Method::GET, "v1/management/policies/active");
-/// Defines the API path to update the currently active version on the checker.
-pub const POLICY_API_SET_ACTIVE_VERSION: (Method, &str) = (Method::PUT, "v1/management/policies/active");
-/// Defines the API path to add a new policy version to the checker.
-pub const POLICY_API_ADD_VERSION: (Method, &str) = (Method::POST, "v1/management/policies");
-/// Defines the API path to fetch a policy's body from a checker.
-pub const POLICY_API_GET_VERSION: (Method, fn(i64) -> String) = (Method::GET, |version: i64| format!("v1/management/policies/{version}"));
-
-/// Defines the API path to check if a workflow as a whole is permitted to be executed.
-pub const DELIBERATION_API_WORKFLOW: (Method, &str) = (Method::POST, "v1/deliberation/execute-workflow");
-/// Defines the API path to check if a task in a workflow is permitted to be executed.
-pub const DELIBERATION_API_EXECUTE_TASK: (Method, &str) = (Method::POST, "v1/deliberation/execute-task");
-/// Defines the API path to check if a dataset in a workflow is permitted to be transferred.
-pub const DELIBERATION_API_TRANSFER_DATA: (Method, &str) = (Method::POST, "v1/deliberation/access-data");
+
+/***** APIS *****/
+/// Defines the interface to the policy store API.
+pub mod store {
+    use policy_reasoner::reasoners::eflint_haskell::EFlintHaskellReasonerContext;
+    // Re-export everything from the axum server
+    pub use policy_store::servers::axum::spec::*;
+    use reqwest::Method;
+    use serde::{Deserialize, Serialize};
+
+
+    /// Define the additional reasoner context endpoint
+    pub const GET_CONTEXT_PATH: EndpointPath = EndpointPath { method: Method::GET, path: "/v2/context" };
+
+
+    /// Defines the response of getting the reasoner context.
+    #[derive(Clone, Debug, Deserialize, Serialize)]
+    pub struct GetContextResponse {
+        /// The context as returned by the reasoner
+        pub context: EFlintHaskellReasonerWithInterfaceContext,
+    }
+
+    /// Defines the context for the eFLINT reasoner.
+    pub type EFlintHaskellReasonerWithInterfaceContext = EFlintHaskellReasonerContext;
+}
+
+
+
+
+
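+// Not part of the patch: a hedged sketch of how a client might hit the new
+// context endpoint with reqwest (the "json" feature is enabled in the
+// workspace dependency above). The base URL and error handling are
+// illustrative assumptions only; `EndpointPath`'s public `method`/`path`
+// fields are used exactly as constructed above.
+//
+//     async fn fetch_context(base: &str) -> Result<store::GetContextResponse, reqwest::Error> {
+//         let url = format!("{}{}", base.trim_end_matches('/'), store::GET_CONTEXT_PATH.path);
+//         reqwest::get(&url).await?.error_for_status()?.json().await
+//     }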
+/// Defines the interface to the policy deliberation API.
+pub mod deliberation {
+    use std::error::Error;
+    use std::fmt::{Display, Formatter, Result as FResult};
+
+    use policy_reasoner::spec::reasonerconn::ReasonerResponse;
+    use policy_reasoner::spec::reasons::ManyReason;
+    use policy_store::servers::axum::spec::EndpointPath;
+    use prost::bytes::{Buf, BufMut};
+    use prost::encoding::{DecodeContext, WireType};
+    use prost::{DecodeError, Message};
+    use reqwest::Method;
+    use serde::{Deserialize, Serialize};
+
+    use crate::pc::ProgramCounter;
+    use crate::wir::Workflow;
+
+
+    /***** CONSTANTS *****/
+    /// Defines the API path to check if a workflow as a whole is permitted to be executed.
+    pub const CHECK_WORKFLOW_PATH: EndpointPath = EndpointPath { method: Method::GET, path: "/v2/workflow" };
+    /// Defines the API path to check if a task in a workflow is permitted to be executed.
+    pub const CHECK_TASK_PATH: EndpointPath = EndpointPath { method: Method::GET, path: "/v2/task" };
+    /// Defines the API path to check if a dataset in a workflow is permitted to be transferred.
+    pub const CHECK_TRANSFER_PATH: EndpointPath = EndpointPath { method: Method::GET, path: "/v2/transfer" };
+
+
+
+
+
+    /***** ERRORS *****/
+    /// Failed to decode one of the requests.
+    #[derive(Debug)]
+    pub enum RequestDecodeError {
+        /// Failed to decode the workflow in the request.
+        Workflow { err: serde_json::Error },
+        /// Failed to decode the task in the request.
+        Task { err: serde_json::Error },
+    }
+    impl Display for RequestDecodeError {
+        #[inline]
+        fn fmt(&self, f: &mut Formatter) -> FResult {
+            match self {
+                Self::Workflow { .. } => write!(f, "Failed to parse workflow in message"),
+                Self::Task { .. } => write!(f, "Failed to parse task in message"),
+            }
+        }
+    }
+    impl Error for RequestDecodeError {
+        #[inline]
+        fn source(&self) -> Option<&(dyn 'static + Error)> {
+            match self {
+                Self::Workflow { err } => Some(err),
+                Self::Task { err } => Some(err),
+            }
+        }
+    }
+
+
+
+
+
+    /***** AUXILLARY *****/
+    /// Defines a wrapper around some other struct such that some of its fields can be carried as
+    /// serde JSON-encoded strings.
+    #[derive(Clone, Debug)]
+    pub struct Prost<R> {
+        /// The actual request
+        request: R,
+        /// The string buffers we use for parsing.
+        buffers: Vec<String>,
+    }
+
+
+
+
+
+    /***** API BODIES *****/
+    /// Defines the request to send to the [`Server::check_workflow()`] endpoint.
+    #[derive(Clone, Debug, Deserialize, Serialize)]
+    pub struct CheckWorkflowRequest {
+        /// The usecase that refers to the API to consult for state.
+        pub usecase: String,
+        /// The workflow we're parsing.
+        pub workflow: Workflow,
+    }
+
+    // Prost impl for the CheckWorkflowRequest
+    impl Default for Prost<CheckWorkflowRequest> {
+        #[inline]
+        fn default() -> Self {
+            Self { request: CheckWorkflowRequest { usecase: String::new(), workflow: Workflow::default() }, buffers: vec![String::new()] }
+        }
+    }
+    impl Prost<CheckWorkflowRequest> {
+        /// Constructor for the Prost that creates it from an existing request.
+        ///
+        /// When encoding something, this is needed to properly encode it.
+        ///
+        /// # Arguments
+        /// - `request`: The `R`equest to be encoded.
+        ///
+        /// # Returns
+        /// A new Prost, ready to be encoded.
+        pub fn new(request: CheckWorkflowRequest) -> Self {
+            // Serialize the workflow first
+            let wf: String = match serde_json::to_string(&request.workflow) {
+                Ok(wf) => wf,
+                Err(err) => panic!("Failed to serialize given workflow: {err}"),
+            };
+
+            // OK, return self
+            Self { request, buffers: vec![wf] }
+        }
+
+        /// Retrieves the internal request.
+        ///
+        /// Note that this may fail, as the embedded workflow won't be parsed up until this moment.
+        ///
+        /// # Returns
+        /// A new [`CheckWorkflowRequest`] that is ready to use.
+        ///
+        /// # Errors
+        /// This function fails if we failed to parse the internal workflow.
+        pub fn into_inner(mut self) -> Result<CheckWorkflowRequest, RequestDecodeError> {
+            self.request.workflow = serde_json::from_str(&self.buffers[0]).map_err(|err| RequestDecodeError::Workflow { err })?;
+            Ok(self.request)
+        }
+    }
+    impl Message for Prost<CheckWorkflowRequest> {
+        fn encode_raw(&self, buf: &mut impl BufMut)
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            if self.request.usecase != "" {
+                ::prost::encoding::string::encode(1u32, &self.request.usecase, buf);
+            }
+            if self.buffers[0] != "" {
+                ::prost::encoding::string::encode(2u32, &self.buffers[0], buf);
+            }
+        }
+
+        fn merge_field(&mut self, tag: u32, wire_type: WireType, buf: &mut impl Buf, ctx: DecodeContext) -> Result<(), DecodeError>
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            const STRUCT_NAME: &str = "CheckWorkflowRequest";
+            match tag {
+                1u32 => ::prost::encoding::string::merge(wire_type, &mut self.request.usecase, buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "usecase");
+                    error
+                }),
+                2u32 => ::prost::encoding::string::merge(wire_type, &mut self.buffers[0], buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "workflow");
+                    error
+                }),
+                _ => ::prost::encoding::skip_field(wire_type, tag, buf, ctx),
+            }
+        }
+
+        fn encoded_len(&self) -> usize {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            0 + if self.request.usecase != "" { ::prost::encoding::string::encoded_len(1u32, &self.request.usecase) } else { 0 }
+                + if self.buffers[0] != "" { ::prost::encoding::string::encoded_len(2u32, &self.buffers[0]) } else { 0 }
+        }
+
+        fn clear(&mut self) {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            self.request.usecase.clear();
+            self.buffers[0].clear();
+        }
+    }
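+    // Not part of the patch: an assumed round-trip through the `Prost`
+    // wrapper. The workflow is JSON-serialized into a string buffer by
+    // `new()` and only parsed back on `into_inner()`.
+    //
+    //     use prost::Message as _;
+    //
+    //     let req = CheckWorkflowRequest { usecase: "central".into(), workflow: Workflow::default() };
+    //     let mut buf = Vec::new();
+    //     Prost::<CheckWorkflowRequest>::new(req).encode(&mut buf).expect("encoding into a Vec never fails");
+    //     let req = Prost::<CheckWorkflowRequest>::decode(buf.as_slice()).unwrap().into_inner().unwrap();
+    //     assert_eq!(req.usecase, "central");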
+
+
+
+    /// Defines the request to send to the [`Server::check_task()`] endpoint.
+    #[derive(Clone, Debug, Deserialize, Serialize)]
+    pub struct CheckTaskRequest {
+        /// The usecase that refers to the API to consult for state.
+        pub usecase: String,
+        /// The workflow we're parsing.
+        pub workflow: Workflow,
+        /// The task in the workflow that we want to check specifically.
+        pub task: ProgramCounter,
+    }
+
+    // Prost impl for the CheckTaskRequest
+    impl Default for Prost<CheckTaskRequest> {
+        #[inline]
+        fn default() -> Self {
+            Self {
+                request: CheckTaskRequest { usecase: String::new(), workflow: Workflow::default(), task: ProgramCounter::default() },
+                buffers: vec![String::new(), String::new()],
+            }
+        }
+    }
+    impl Prost<CheckTaskRequest> {
+        /// Constructor for the Prost that creates it from an existing request.
+        ///
+        /// When encoding something, this is needed to properly encode it.
+        ///
+        /// # Arguments
+        /// - `request`: The `R`equest to be encoded.
+        ///
+        /// # Returns
+        /// A new Prost, ready to be encoded.
+        pub fn new(request: CheckTaskRequest) -> Self {
+            // Serialize the workflow & PC first
+            let wf: String = match serde_json::to_string(&request.workflow) {
+                Ok(wf) => wf,
+                Err(err) => panic!("Failed to serialize given workflow: {err}"),
+            };
+            let pc: String = match serde_json::to_string(&request.task) {
+                Ok(pc) => pc,
+                Err(err) => panic!("Failed to serialize given program counter: {err}"),
+            };
+
+            // OK, return self
+            Self { request, buffers: vec![wf, pc] }
+        }
+
+        /// Retrieves the internal request.
+        ///
+        /// Note that this may fail, as the embedded workflow won't be parsed up until this moment.
+        ///
+        /// # Returns
+        /// A new [`CheckTaskRequest`] that is ready to use.
+        ///
+        /// # Errors
+        /// This function fails if we failed to parse the internal workflow.
+        pub fn into_inner(mut self) -> Result<CheckTaskRequest, RequestDecodeError> {
+            self.request.workflow = serde_json::from_str(&self.buffers[0]).map_err(|err| RequestDecodeError::Workflow { err })?;
+            self.request.task = serde_json::from_str(&self.buffers[1]).map_err(|err| RequestDecodeError::Task { err })?;
+            Ok(self.request)
+        }
+    }
+    impl Message for Prost<CheckTaskRequest> {
+        fn encode_raw(&self, buf: &mut impl BufMut)
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            if self.request.usecase != "" {
+                ::prost::encoding::string::encode(1u32, &self.request.usecase, buf);
+            }
+            if self.buffers[0] != "" {
+                ::prost::encoding::string::encode(2u32, &self.buffers[0], buf);
+            }
+            if self.buffers[1] != "" {
+                ::prost::encoding::string::encode(3u32, &self.buffers[1], buf);
+            }
+        }
+
+        fn merge_field(&mut self, tag: u32, wire_type: WireType, buf: &mut impl Buf, ctx: DecodeContext) -> Result<(), DecodeError>
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            const STRUCT_NAME: &'static str = "CheckTaskRequest";
+            match tag {
+                1u32 => ::prost::encoding::string::merge(wire_type, &mut self.request.usecase, buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "usecase");
+                    error
+                }),
+                2u32 => ::prost::encoding::string::merge(wire_type, &mut self.buffers[0], buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "workflow");
+                    error
+                }),
+                3u32 => ::prost::encoding::string::merge(wire_type, &mut self.buffers[1], buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "task");
+                    error
+                }),
+                _ => ::prost::encoding::skip_field(wire_type, tag, buf, ctx),
+            }
+        }
+
+        fn encoded_len(&self) -> usize {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            0 + if self.request.usecase != "" { ::prost::encoding::string::encoded_len(1u32, &self.request.usecase) } else { 0 }
+                + if self.buffers[0] != "" { ::prost::encoding::string::encoded_len(2u32, &self.buffers[0]) } else { 0 }
+                + if self.buffers[1] != "" { ::prost::encoding::string::encoded_len(3u32, &self.buffers[1]) } else { 0 }
+        }
+
+        fn clear(&mut self) {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            self.request.usecase.clear();
+            self.buffers[0].clear();
+            self.buffers[1].clear();
+        }
+    }
+
+
+
+    /// Defines the request to send to the [`Server::check_transfer()`] endpoint.
+    #[derive(Clone, Debug, Deserialize, Serialize)]
+    pub struct CheckTransferRequest {
+        /// The usecase that refers to the API to consult for state.
+        pub usecase: String,
+        /// The workflow we're parsing.
+        pub workflow: Workflow,
+        /// The task in the workflow that we want to check specifically.
+        pub task: Option<ProgramCounter>,
+        /// The input in the task that we want to check specifically.
+        pub input: String,
+    }
+
+    // Prost impl for the CheckTransferRequest
+    impl Default for Prost<CheckTransferRequest> {
+        #[inline]
+        fn default() -> Self {
+            Self {
+                request: CheckTransferRequest {
+                    usecase: String::new(),
+                    workflow: Workflow::default(),
+                    task: Some(ProgramCounter::default()),
+                    input: String::new(),
+                },
+                buffers: vec![String::new(), String::new()],
+            }
+        }
+    }
+    impl Prost<CheckTransferRequest> {
+        /// Constructor for the Prost that creates it from an existing request.
+        ///
+        /// When encoding something, this is needed to properly encode it.
+        ///
+        /// # Arguments
+        /// - `request`: The `R`equest to be encoded.
+        ///
+        /// # Returns
+        /// A new Prost, ready to be encoded.
+        pub fn new(request: CheckTransferRequest) -> Self {
+            // Serialize the workflow & PC first
+            let wf: String = match serde_json::to_string(&request.workflow) {
+                Ok(wf) => wf,
+                Err(err) => panic!("Failed to serialize given workflow: {err}"),
+            };
+            let pc: String = match serde_json::to_string(&request.task) {
+                Ok(pc) => pc,
+                Err(err) => panic!("Failed to serialize given program counter: {err}"),
+            };
+
+            // OK, return self
+            Self { request, buffers: vec![wf, pc] }
+        }
+
+        /// Retrieves the internal request.
+        ///
+        /// Note that this may fail, as the embedded workflow won't be parsed up until this moment.
+        ///
+        /// # Returns
+        /// A new [`CheckTransferRequest`] that is ready to use.
+        ///
+        /// # Errors
+        /// This function fails if we failed to parse the internal workflow.
+        pub fn into_inner(mut self) -> Result<CheckTransferRequest, RequestDecodeError> {
+            self.request.workflow = serde_json::from_str(&self.buffers[0]).map_err(|err| RequestDecodeError::Workflow { err })?;
+            self.request.task = serde_json::from_str(&self.buffers[1]).map_err(|err| RequestDecodeError::Task { err })?;
+            Ok(self.request)
+        }
+    }
+    impl Message for Prost<CheckTransferRequest> {
+        fn encode_raw(&self, buf: &mut impl BufMut)
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            if self.request.usecase != "" {
+                ::prost::encoding::string::encode(1u32, &self.request.usecase, buf);
+            }
+            if self.buffers[0] != "" {
+                ::prost::encoding::string::encode(2u32, &self.buffers[0], buf);
+            }
+            if self.buffers[1] != "" {
+                ::prost::encoding::string::encode(3u32, &self.buffers[1], buf);
+            }
+            if self.request.input != "" {
+                ::prost::encoding::string::encode(4u32, &self.request.input, buf);
+            }
+        }
+
+        fn merge_field(&mut self, tag: u32, wire_type: WireType, buf: &mut impl Buf, ctx: DecodeContext) -> Result<(), DecodeError>
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            const STRUCT_NAME: &str = "CheckTransferRequest";
+            match tag {
+                1u32 => ::prost::encoding::string::merge(wire_type, &mut self.request.usecase, buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "usecase");
+                    error
+                }),
+                2u32 => ::prost::encoding::string::merge(wire_type, &mut self.buffers[0], buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "workflow");
+                    error
+                }),
+                3u32 => ::prost::encoding::string::merge(wire_type, &mut self.buffers[1], buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "task");
+                    error
+                }),
+                4u32 => ::prost::encoding::string::merge(wire_type, &mut self.request.input, buf, ctx).map_err(|mut error| {
+                    error.push(STRUCT_NAME, "input");
+                    error
+                }),
+                _ => ::prost::encoding::skip_field(wire_type, tag, buf, ctx),
+            }
+        }
+
+        fn encoded_len(&self) -> usize {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            0 + if self.request.usecase != "" { ::prost::encoding::string::encoded_len(1u32, &self.request.usecase) } else { 0 }
+                + if self.buffers[0] != "" { ::prost::encoding::string::encoded_len(2u32, &self.buffers[0]) } else { 0 }
+                + if self.buffers[1] != "" { ::prost::encoding::string::encoded_len(3u32, &self.buffers[1]) } else { 0 }
+                + if self.request.input != "" { ::prost::encoding::string::encoded_len(4u32, &self.request.input) } else { 0 }
+        }
+
+        fn clear(&mut self) {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            self.request.usecase.clear();
+            self.buffers[0].clear();
+            self.buffers[1].clear();
+            self.request.input.clear();
+        }
+    }
+
+
+
+    /// Defines the result of the three checking endpoints.
+    #[derive(Clone, Debug, Deserialize, Serialize)]
+    pub struct CheckResponse<R> {
+        /// The result
+        pub verdict: ReasonerResponse<R>,
+    }
+
+    // Prost impl for the CheckResponse
+    impl Default for Prost<CheckResponse<ManyReason<String>>> {
+        #[inline]
+        fn default() -> Self { Self { request: CheckResponse { verdict: ReasonerResponse::Success }, buffers: vec!["1".into()] } }
+    }
+    impl Prost<CheckResponse<ManyReason<String>>> {
+        /// Constructor for the Prost that creates it from an existing request.
+        ///
+        /// When encoding something, this is needed to properly encode it.
+        ///
+        /// # Arguments
+        /// - `request`: The `R`equest to be encoded.
+        ///
+        /// # Returns
+        /// A new Prost, ready to be encoded.
+        pub fn new(request: CheckResponse<ManyReason<String>>) -> Self {
+            // Build the buffers accordingly
+            let mut buffers = Vec::with_capacity(1 + if let ReasonerResponse::Violated(reasons) = &request.verdict { reasons.len() } else { 0 });
+            buffers.push(if matches!(request.verdict, ReasonerResponse::Success) { "1".to_string() } else { "0".to_string() });
+            if let ReasonerResponse::Violated(reasons) = request.verdict {
+                for reason in reasons.into_iter() {
+                    buffers.push(reason);
+                }
+            }
+
+            // OK, return self
+            Self { request: CheckResponse { verdict: ReasonerResponse::Success }, buffers }
+        }
+
+        /// Retrieves the internal response.
+        ///
+        /// # Returns
+        /// A new [`CheckResponse`] that is ready to use.
+        pub fn into_inner(mut self) -> CheckResponse<ManyReason<String>> {
+            if self.buffers[0] == "1" {
+                CheckResponse { verdict: ReasonerResponse::Success }
+            } else {
+                CheckResponse { verdict: ReasonerResponse::Violated(self.buffers.drain(1..).collect()) }
+            }
+        }
+    }
+    impl Message for Prost<CheckResponse<ManyReason<String>>> {
+        fn encode_raw(&self, buf: &mut impl BufMut)
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            ::prost::encoding::bool::encode(1u32, &(self.buffers[0] == "1"), buf);
+            ::prost::encoding::string::encode_repeated(2u32, &self.buffers[1..], buf);
+        }
+
+        fn merge_field(&mut self, tag: u32, wire_type: WireType, buf: &mut impl Buf, ctx: DecodeContext) -> Result<(), DecodeError>
+        where
+            Self: Sized,
+        {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            const STRUCT_NAME: &str = "CheckResponse";
+            match tag {
+                1u32 => {
+                    let mut value = self.buffers[0] == "1";
+                    ::prost::encoding::bool::merge(wire_type, &mut value, buf, ctx).map_err(|mut error| {
+                        error.push(STRUCT_NAME, "success");
+                        error
+                    })?;
+                    if value {
+                        self.buffers[0] = "1".into();
+                    } else {
+                        self.buffers[0] = "0".into();
+                    }
+                    Ok(())
+                },
+                2u32 => {
+                    let mut buffers: Vec<String> = self.buffers.drain(1..).collect();
+                    ::prost::encoding::string::merge_repeated(wire_type, &mut buffers, buf, ctx).map_err(|mut error| {
+                        error.push(STRUCT_NAME, "reasons");
+                        error
+                    })?;
+                    self.buffers.extend(buffers);
+                    Ok(())
+                },
+                _ => ::prost::encoding::skip_field(wire_type, tag, buf, ctx),
+            }
+        }
+
+        fn encoded_len(&self) -> usize {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            ::prost::encoding::bool::encoded_len(1u32, &(self.buffers[0] == "1"))
+                + ::prost::encoding::string::encoded_len_repeated(2u32, &self.buffers[1..])
+        }
+
+        fn clear(&mut self) {
+            // This is copied from the auto-generated prost code but only for the fields in question
+            self.buffers[0].clear();
+            self.buffers.truncate(1);
+        }
+    }
+}
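+// Not part of the patch: a sketch of collapsing a checker verdict into the
+// boolean-plus-reasons shape that `CheckTransferReply` (in registering.rs,
+// below) carries. `Success`/`Violated` are the variants used above.
+//
+//     fn to_reply(verdict: ReasonerResponse<ManyReason<String>>) -> (bool, ManyReason<String>) {
+//         match verdict {
+//             ReasonerResponse::Success => (true, ManyReason::new()),
+//             ReasonerResponse::Violated(reasons) => (false, reasons),
+//         }
+//     }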
diff --git a/specifications/src/driving.rs b/specifications/src/driving.rs
index 5ccbc30e..e9568fad 100644
--- a/specifications/src/driving.rs
+++ b/specifications/src/driving.rs
@@ -4,7 +4,7 @@
 // Created:
 //   06 Jan 2023, 14:43:35
 // Last edited:
-//   08 Feb 2024, 17:01:30
+//   02 May 2025, 14:18:22
 // Auto updated?
 //   Yes
 //
@@ -124,7 +124,7 @@ pub struct ExecuteReply {
     /// If given, then the driver has stderr to write to the user.
     #[prost(tag = "4", optional, string)]
     pub stderr: Option<String>,
-    /// If given, then the workflow has returned a value to use (`FullValue` encoded as JSON).
+    /// If given, then the workflow has returned a value to use (a tuple of a `FullValue` and an optional `ProgramCounter` encoded as JSON).
     #[prost(tag = "5", optional, string)]
     pub value: Option<String>,
 }
diff --git a/specifications/src/lib.rs b/specifications/src/lib.rs
index 071ce5bc..f2120280 100644
--- a/specifications/src/lib.rs
+++ b/specifications/src/lib.rs
@@ -4,7 +4,7 @@
 // Created:
 //   07 Jun 2023, 16:22:04
 // Last edited:
-//   01 May 2024, 10:11:07
+//   14 Nov 2024, 16:09:37
 // Auto updated?
 //   Yes
 //
@@ -26,9 +26,11 @@
 pub mod driving;
 pub mod errors;
 pub mod os;
 pub mod package;
+pub mod pc;
 pub mod planning;
 pub mod policy;
 pub mod profiling;
 pub mod registering;
 pub mod version;
+pub mod wir;
 pub mod working;
diff --git a/brane-exe/src/pc.rs b/specifications/src/pc.rs
similarity index 97%
rename from brane-exe/src/pc.rs
rename to specifications/src/pc.rs
index 06c0ecf3..b971a21a 100644
--- a/brane-exe/src/pc.rs
+++ b/specifications/src/pc.rs
@@ -2,26 +2,28 @@
 // by Lut99
 //
 // Created:
-//   16 Jan 2024, 09:59:53
+//   14 Nov 2024, 16:09:25
 // Last edited:
-//   16 Jan 2024, 15:08:41
+//   29 Apr 2025, 13:47:45
 // Auto updated?
 //   Yes
 //
 // Description:
-//!   Implements a program counter that correctly serializes.
+//!   Defines a [`ProgramCounter`] for pointing to locations in
+//!   [WIR](crate::wir::Workflow)s.
 //

 use std::fmt::{Debug, Display, Formatter, Result as FResult};
 use std::ops::{Add, AddAssign};
 use std::str::FromStr;

-use brane_ast::SymTable;
-use brane_ast::func_id::FunctionId;
 use num_traits::AsPrimitive;
 use serde::de::{self, Deserialize, Deserializer, Visitor};
 use serde::ser::{Serialize, SerializeSeq, Serializer};

+use crate::wir::SymTable;
+use crate::wir::func_id::FunctionId;
+

 /***** ERRORS *****/
 /// Defines errors when parsing [`ProgramCounter`] from a string.
@@ -32,7 +34,7 @@ pub enum ProgramCounterParseError {
     MissingColon { raw: String },
     /// Failed to parse the given string as a [`FunctionId`].
     #[error(transparent)]
-    InvalidFunctionId { source: brane_ast::func_id::FunctionIdParseError },
+    InvalidFunctionId { source: crate::wir::func_id::FunctionIdParseError },
     /// Failed to parse the given string as a numerical index.
     #[error("Failed to parse '{raw}' as a valid edge index (i.e., unsigned integer)")]
     InvalidIdx { raw: String, err: std::num::ParseIntError },
diff --git a/specifications/src/registering.rs b/specifications/src/registering.rs
index ed02b786..472b9134 100644
--- a/specifications/src/registering.rs
+++ b/specifications/src/registering.rs
@@ -4,7 +4,7 @@
 // Created:
 //   15 Jan 2024, 14:32:30
 // Last edited:
-//   07 Feb 2024, 13:49:33
+//   02 May 2025, 14:54:22
 // Auto updated?
 //   Yes
 //
@@ -13,6 +13,7 @@
 //!   registry.
 //

+use policy_reasoner::spec::reasons::ManyReason;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;

@@ -56,6 +57,6 @@ pub struct CheckTransferReply {
     /// The verdict of the checker; `true` means OK, `false` means deny.
     pub verdict: bool,
     /// If `verdict` is false, this *may* contain reasons why the transfer was denied.
-    #[serde(default = "Vec::new", skip_serializing_if = "Vec::is_empty")]
-    pub reasons: Vec<String>,
+    #[serde(default = "ManyReason::new", skip_serializing_if = "Vec::is_empty")]
+    pub reasons: ManyReason<String>,
 }
diff --git a/specifications/src/wir/builtins.rs b/specifications/src/wir/builtins.rs
new file mode 100644
index 00000000..68efed31
--- /dev/null
+++ b/specifications/src/wir/builtins.rs
@@ -0,0 +1,116 @@
+// BUILTINS.rs
+// by Lut99
+//
+// Created:
+//   14 Nov 2024, 15:46:16
+// Last edited:
+//   14 Nov 2024, 17:30:04
+// Auto updated?
+//   Yes
+//
+// Description:
+//!   Defines builtin functions & classes in the WIR.
+//
+
+use super::data_type::DataType;
+
+
+/***** LIBRARY *****/
+/// Defines the builtin functions that exist in BraneScript.
+#[derive(Clone, Copy, Debug)]
+pub enum BuiltinFunctions {
+    /// The print-function, which prints some text to stdout.
+    Print,
+    /// The println-function, which does the same as `Print` but now with a newline appended to the text.
+    PrintLn,
+
+    /// The len-function, which returns the length of an array.
+    Len,
+
+    /// The commit_builtin-function, which turns an IntermediateResult into a Data.
+    CommitResult,
+}
+
+impl BuiltinFunctions {
+    /// Returns the identifier of this builtin function.
+    #[inline]
+    pub fn name(&self) -> &'static str {
+        use BuiltinFunctions::*;
+        match self {
+            Print => "print",
+            PrintLn => "println",
+
+            Len => "len",
+
+            CommitResult => "commit_result",
+        }
+    }
+
+    /// Returns an array with all the builtin functions in it.
+    #[inline]
+    pub const fn all() -> [Self; 4] { [Self::Print, Self::PrintLn, Self::Len, Self::CommitResult] }
+
+    /// Checks if the given string is a builtin.
+    #[inline]
+    pub fn is_builtin(name: impl AsRef<str>) -> bool {
+        // Note that the order in which we match (i.e., on self instead of name) is a little awkward but guarantees Rust will warn us if we change the set.
+        let name: &str = name.as_ref();
+        for builtin in Self::all() {
+            if name == builtin.name() {
+                return true;
+            }
+        }
+        false
+    }
+}
+
+
+
+/// Defines the builtin classes that exist in BraneScript.
+#[derive(Clone, Copy, Debug)]
+pub enum BuiltinClasses {
+    /// The data-class.
+    Data,
+    /// The intermediate-result-class.
+    IntermediateResult,
+}
+
+impl BuiltinClasses {
+    /// Returns the identifier of this builtin class.
+    #[inline]
+    pub fn name(&self) -> &'static str {
+        use BuiltinClasses::*;
+        match self {
+            Data => "Data",
+            IntermediateResult => "IntermediateResult",
+        }
+    }
+
+    /// Returns an array with all the builtin classes in it.
+    #[inline]
+    pub fn all() -> [Self; 2] { [Self::Data, Self::IntermediateResult] }
+
+    /// Defines the fields of this class.
+    ///
+    /// # Returns
+    /// A list of pairs of the name and the [`DataType`] of that field.
+    #[inline]
+    pub fn props(&self) -> &'static [(&'static str, DataType)] {
+        match self {
+            Self::Data => &[("name", DataType::String)],
+            Self::IntermediateResult => &[("path", DataType::String)],
+        }
+    }
+
+    /// Defines the methods of this class.
+    ///
+    /// # Returns
+    /// A list of pairs of the name and a pair with the arguments and return type of that method.
+    #[inline]
+    pub fn methods(&self) -> &'static [(&'static str, (Vec<DataType>, DataType))] {
+        match self {
+            Self::Data => &[],
+            Self::IntermediateResult => &[],
+        }
+    }
+}
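+// Not part of the patch: this mirrors the brane-cli change near the top of
+// this diff, which builds `VarDef`s directly from the `(name, DataType)`
+// pairs returned by `props()`.
+//
+//     let props: Vec<VarDef> = BuiltinClasses::Data
+//         .props()
+//         .iter()
+//         .map(|(name, dtype)| VarDef { name: (*name).into(), data_type: dtype.clone() })
+//         .collect();
+//     assert_eq!(props[0].name, "name");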
diff --git a/brane-ast/src/data_type.rs b/specifications/src/wir/data_type.rs
similarity index 73%
rename from brane-ast/src/data_type.rs
rename to specifications/src/wir/data_type.rs
index 2b22b863..a52cf171 100644
--- a/brane-ast/src/data_type.rs
+++ b/specifications/src/wir/data_type.rs
@@ -2,9 +2,9 @@
// by Lut99
//
// Created:
-// 30 Aug 2022, 12:02:57
+// 14 Nov 2024, 15:45:28
// Last edited:
-// 17 Jan 2023, 15:13:15
+// 14 Nov 2024, 15:58:01
// Auto updated?
// Yes
//
@@ -17,7 +17,7 @@
use std::fmt::{Display, Formatter, Result as FResult};

use serde::{Deserialize, Serialize};

-use crate::spec::BuiltinClasses;
+use super::builtins::BuiltinClasses;


/***** AUXILIARY ERRORS *****/
@@ -185,76 +185,6 @@ impl Display for DataType {
}
}

-impl From<brane_dsl::DataType> for DataType {
- #[inline]
- fn from(value: brane_dsl::DataType) -> Self {
- use brane_dsl::DataType::*;
- match value {
- Any => Self::Any,
- Void => Self::Void,
-
- Boolean => Self::Boolean,
- Integer => Self::Integer,
- Real => Self::Real,
- String => Self::String,
- Semver => Self::Semver,
-
- Array(a) => Self::Array { elem_type: a.into() },
- Function(sig) => Self::Function { args: sig.args.into_iter().map(|d| d.into()).collect(), ret: Box::new(sig.ret.into()) },
- Class(name) => {
- // Match if 'Data' or 'IntermediateResult'
- if name == BuiltinClasses::Data.name() {
- Self::Data
- } else if name == BuiltinClasses::IntermediateResult.name() {
- Self::IntermediateResult
- } else {
- Self::Class { name }
- }
- },
- }
- }
-}
-
-impl From<&brane_dsl::DataType> for DataType {
- #[inline]
- fn from(value: &brane_dsl::DataType) -> Self {
- use brane_dsl::DataType::*;
- match value {
- Any => Self::Any,
- Void => Self::Void,
-
- Boolean => Self::Boolean,
- Integer => Self::Integer,
- Real => Self::Real,
- String => Self::String,
- Semver => Self::Semver,
-
- Array(a) => Self::Array { elem_type: a.into() },
- Function(sig) => Self::Function { args: sig.args.iter().map(|d| d.into()).collect(), ret: Box::new((&sig.ret).into()) },
- Class(name) => {
- // Match if 'Data' or 'IntermediateResult'
- if name == BuiltinClasses::Data.name() {
- Self::Data
- } else if name == BuiltinClasses::IntermediateResult.name() {
- Self::IntermediateResult
- } else {
- Self::Class { name: name.clone() }
- }
- },
- }
- }
-}
-
-impl From<Box<brane_dsl::DataType>> for Box<DataType> {
- #[inline]
- fn from(value: Box<brane_dsl::DataType>) -> Self { Self::from(&value) }
-}
-
-impl From<&Box<brane_dsl::DataType>> for Box<DataType> {
- #[inline]
- fn from(value: &Box<brane_dsl::DataType>) -> Self { Box::new(DataType::from(value.as_ref())) }
-}
-
impl From<&str> for DataType {
fn from(value: &str) -> Self {
// First: any arrays are done recursively
diff --git a/brane-ast/src/func_id.rs b/specifications/src/wir/func_id.rs
similarity index 99%
rename from brane-ast/src/func_id.rs
rename to specifications/src/wir/func_id.rs
index e6a3397d..13b76e31 100644
--- a/brane-ast/src/func_id.rs
+++ b/specifications/src/wir/func_id.rs
@@ -2,9 +2,9 @@
// by Lut99
//
// Created:
-// 16 Jan 2024, 11:31:56
+// 14 Nov 2024, 16:01:32
// Last edited:
-// 16 Jan 2024, 15:03:01
+// 14 Nov 2024, 16:01:52
// Auto updated?
// Yes
//
diff --git a/brane-ast/src/locations.rs b/specifications/src/wir/locations.rs
similarity index 80%
rename from brane-ast/src/locations.rs
rename to specifications/src/wir/locations.rs
index b804a9ba..1de64be8 100644
--- a/brane-ast/src/locations.rs
+++ b/specifications/src/wir/locations.rs
@@ -4,7 +4,7 @@
// Created:
// 07 Sep 2022, 10:48:30
// Last edited:
-// 14 Nov 2022, 10:04:13
+// 14 Nov 2024, 16:02:37
// Auto updated?
// Yes
//
@@ -13,7 +13,6 @@
//! node.
//

-use brane_dsl::location::AllowedLocations;
use serde::{Deserialize, Serialize};


@@ -58,13 +57,3 @@ impl Locations {
#[inline]
pub fn is_restrictive(&self) -> bool { matches!(self, Self::Restricted(_)) }
}
-
-impl From<AllowedLocations> for Locations {
- #[inline]
- fn from(value: AllowedLocations) -> Self {
- match value {
- AllowedLocations::All => Self::All,
- AllowedLocations::Exclusive(locs) => Self::Restricted(locs.into_iter().map(|l| l.into()).collect()),
- }
- }
-}
diff --git a/specifications/src/wir/merge_strategy.rs b/specifications/src/wir/merge_strategy.rs
new file mode 100644
index 00000000..95b0bf67
--- /dev/null
+++ b/specifications/src/wir/merge_strategy.rs
@@ -0,0 +1,76 @@
+// MERGE STRATEGY.rs
+// by Lut99
+//
+// Created:
+// 14 Nov 2024, 16:07:58
+// Last edited:
+// 14 Nov 2024, 16:08:25
+// Auto updated?
+// Yes
+//
+// Description:
+//! Defines the [`MergeStrategy`], which defines how the results of
+//! parallel statements are combined into one.
+//

+use serde::{Deserialize, Serialize};


+/***** LIBRARY *****/
+/// Defines merge strategies for the parallel statements.
+#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Hash, Serialize)]
+pub enum MergeStrategy {
+ /// Take the value that arrived first. The statement returns as soon as this first value is in, without waiting for the rest.
+ First,
+ /// Take the value that arrived first. The statement will still block until all values have returned.
+ FirstBlocking,
+ /// Take the value that arrived last.
+ Last,
+
+ /// Add all the resulting values together. This means that they must all be numeric.
+ Sum,
+ /// Multiply all the resulting values together. This means that they must all be numeric.
+ Product,
+
+ /// Take the largest value. Use on booleans to get an 'OR'-effect (i.e., it returns true iff there is at least one true).
+ Max,
+ /// Take the smallest value. Use on booleans to get an 'AND'-effect (i.e., it returns false iff there is at least one false).
+ Min,
+
+ /// Returns all values as an Array.
+ All,
+
+ /// No merge strategy needed.
+ None,
+}
+
+impl From<&str> for MergeStrategy {
+ #[inline]
+ fn from(value: &str) -> Self {
+ match value.to_lowercase().as_str() {
+ "first" => Self::First,
+ "first*" => Self::FirstBlocking,
+ "last" => Self::Last,
+
+ "+" | "sum" => Self::Sum,
+ "*" | "product" => Self::Product,
+
+ "max" => Self::Max,
+ "min" => Self::Min,
+
+ "all" => Self::All,
+
+ _ => Self::None,
+ }
+ }
+}
+
+impl From<&String> for MergeStrategy {
+ #[inline]
+ fn from(value: &String) -> Self { Self::from(value.as_str()) }
+}
+
+impl From<String> for MergeStrategy {
+ #[inline]
+ fn from(value: String) -> Self { Self::from(value.as_str()) }
+}
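// A hedged sketch of how the string conversion above behaves; it simply
// exercises the `From<&str>` impl added in this file:
//
//     assert_eq!(MergeStrategy::from("first*"), MergeStrategy::FirstBlocking);
//     assert_eq!(MergeStrategy::from("SUM"), MergeStrategy::Sum); // matching is case-insensitive
//     assert_eq!(MergeStrategy::from("bogus"), MergeStrategy::None); // unknown keywords fall back to `None`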
diff --git a/brane-ast/src/ast.rs b/specifications/src/wir/mod.rs
similarity index 85%
rename from brane-ast/src/ast.rs
rename to specifications/src/wir/mod.rs
index ae8fa38b..d4bbba6d 100644
--- a/brane-ast/src/ast.rs
+++ b/specifications/src/wir/mod.rs
@@ -1,47 +1,51 @@
-// AST.rs
+// MOD.rs
// by Lut99
//
// Created:
-// 30 Aug 2022, 11:55:49
+// 14 Nov 2024, 15:43:22
// Last edited:
-// 06 Feb 2024, 11:38:29
+// 29 Apr 2025, 13:48:00
// Auto updated?
// Yes
//
// Description:
-//! Defines the `brane-ast` AST, which is defined as an acyclic* graph
-//! where the nodes are external, orchestratable and policy-sensitive
-//! tasks (e.g., compute tasks or transfer tasks), and the edges are
-//! 'control flow' that are small pieces of BraneScript that decide
-//! which task to compute next. Can be thought of as a graph with
-//! intelligent edges.
+//! Defines the Brane Workflow Intermediate Representation (WIR).
+//!
+//! See
+//!
+//! for more information.
//

+// Declare the modules
+pub mod builtins;
+pub mod data_type;
+pub mod func_id;
+pub mod locations;
+pub mod merge_strategy;
+
+// Imports
use std::collections::{HashMap, HashSet};
use std::fmt::{Display, Formatter, Result as FResult};
use std::hash::Hash;
use std::sync::Arc;

-use brane_dsl::ParserOptions;
-use brane_dsl::spec::MergeStrategy;
+use builtins::BuiltinClasses;
+use data_type::DataType;
use enum_debug::EnumDebug;
-use log::debug;
+use func_id::FunctionId;
+use lazy_static::lazy_static;
+use locations::{Location, Locations};
+use merge_strategy::MergeStrategy;
use rand::Rng as _;
use rand::distr::Alphanumeric;
use serde::de::{self, Deserializer, Visitor};
use serde::ser::Serializer;
use serde::{Deserialize, Serialize};
use serde_json_any_key::any_key_map;
-use specifications::data::{AvailabilityKind, DataIndex, DataName};
-use specifications::package::{Capability, PackageIndex};
-use specifications::version::Version;

-use crate::data_type::DataType;
-use crate::errors::CompileError;
-use crate::func_id::FunctionId;
-use crate::locations::{Location, Locations};
-use crate::state::CompileState;
-use crate::{CompileResult, compile_snippet};
+use crate::data::{AvailabilityKind, DataName};
+use crate::package::Capability;
+use crate::version::Version;


/***** CONSTANTS *****/
@@ -123,88 +127,6 @@ impl Workflow {
}
}

- /// Compiles the given workflow string to a Workflow.
- ///
- /// # Arguments
- /// - `state`: The CompileState to compile with (and to update).
- /// - `source`: The collected source string for now. This will be updated with the new snippet.
- /// - `pindex`: The PackageIndex to resolve package imports with.
- /// - `dindex`: The DataIndex to resolve data instantiations with.
- /// - `user`: If given, then this is some tentative identifier of the user receiving the final workflow result.
- /// - `options`: The ParseOptions to use.
- /// - `what`: A string describing what we're parsing (e.g., a filename, stdin, ...).
- /// - `snippet`: The actual snippet to parse.
- ///
- /// # Returns
- /// A new Workflow that is the compiled and executable version of the given snippet.
- ///
- /// # Errors
- /// This function errors if the given string was not a valid workflow. If that's the case, it's also pretty-printed to stdout with source context.
- #[allow(clippy::too_many_arguments)]
- pub fn from_source(
- state: &mut CompileState,
- source: &mut String,
- pindex: &PackageIndex,
- dindex: &DataIndex,
- user: Option<&str>,
- options: &ParserOptions,
- what: impl AsRef<str>,
- snippet: impl AsRef<str>,
- ) -> Result<Self, CompileError> {
- let what: &str = what.as_ref();
- let snippet: &str = snippet.as_ref();
-
- // Append the source with the snippet
- source.push_str(snippet);
- source.push('\n');
-
- // Compile the snippet, possibly fetching new ones while at it
- let workflow: Workflow = match compile_snippet(state, snippet.as_bytes(), pindex, dindex, options) {
- CompileResult::Workflow(mut wf, warns) => {
- // Print any warnings to stdout
- for w in warns {
- w.prettyprint(what, &source);
- }
-
- // Then, inject the username if any
- if let Some(user) = user {
- debug!("Setting user '{user}' as receiver of final result");
- wf.user = Arc::new(Some(user.into()));
- }
-
- // Done
- wf
- },
-
- CompileResult::Eof(err) => {
- // Prettyprint it
- err.prettyprint(what, &source);
- state.offset += 1 + snippet.chars().filter(|c| *c == '\n').count();
- return Err(CompileError::AstError { what: what.into(), errs: vec![err] });
- },
- CompileResult::Err(errs) => {
- // Prettyprint them
- for e in &errs {
- e.prettyprint(what, &source);
- }
- state.offset += 1 + snippet.chars().filter(|c| *c == '\n').count();
- return Err(CompileError::AstError { what: what.into(), errs });
- },
-
- // Any others should not occur
- _ => {
- unreachable!();
- },
- };
- debug!("Compiled to workflow:\n\n");
- if log::max_level() == log::LevelFilter::Debug {
- crate::traversals::print::ast::do_traversal(&workflow, std::io::stdout()).unwrap();
- }
-
- // Return
- Ok(workflow)
- }
-
// /// Returns the edge pointed to by the given PC.
// ///
// /// # Arguments
@@ -256,51 +178,6 @@ impl Default for Workflow {
}
}

-/// Snippets are parsed sections of workflow that keep track of the parsed lines.
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct Snippet {
- pub lines: usize,
- pub workflow: Workflow,
-}
-
-impl Snippet {
- /// Compiles the given workflow string to a Snippet.
- ///
- /// # Arguments
- /// - `state`: The CompileState to compile with (and to update).
- /// - `source`: The collected source string for now. This will be updated with the new snippet.
- /// - `pindex`: The PackageIndex to resolve package imports with.
- /// - `dindex`: The DataIndex to resolve data instantiations with.
- /// - `user`: If given, then this is some tentative identifier of the user receiving the final workflow result.
- /// - `options`: The ParseOptions to use.
- /// - `what`: A string describing what we're parsing (e.g., a filename, stdin, ...).
- /// - `snippet`: The actual snippet to parse.
- ///
- /// # Returns
- /// A new Workflow that is the compiled and executable version of the given snippet.
- ///
- /// # Errors
- /// This function errors if the given string was not a valid workflow. If that's the case, it's also pretty-printed to stdout with source context.
- #[allow(clippy::too_many_arguments)]
- pub fn from_source(
- state: &mut CompileState,
- source: &mut String,
- pindex: &PackageIndex,
- dindex: &DataIndex,
- user: Option<&str>,
- options: &ParserOptions,
- what: impl AsRef<str>,
- snippet: impl AsRef<str>,
- ) -> Result<Self, CompileError> {
- let snippet = snippet.as_ref();
-
- Ok(Self {
- lines: 1 + snippet.chars().filter(|c| *c == '\n').count(),
- workflow: Workflow::from_source(state, source, pindex, dindex, user, options, what, snippet)?,
- })
- }
-}
-
/// Defines a piece of metadata.
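// The hunk below adds `ClassDef::from_builtin`, which appends the builtin
// class' methods to a shared function list and records their indices in the
// returned `ClassDef`. A minimal sketch of how a symbol-table builder might
// drive it (illustrative only; the `funcs`/`classes` names are local to this
// sketch):
//
//     let mut funcs: Vec<FunctionDef> = Vec::new();
//     let classes: Vec<ClassDef> = BuiltinClasses::all()
//         .into_iter()
//         .map(|builtin| ClassDef::from_builtin(builtin, &mut funcs))
//         .collect();
//     // `funcs` now holds one `FunctionDef` per builtin method, and each
//     // `ClassDef::methods` entry indexes into it.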
@@ -624,6 +501,34 @@ pub struct ClassDef {
#[serde(rename = "m")]
pub methods: Vec<usize>,
}
+impl ClassDef {
+ /// Creates a new [`ClassDef`] from the given builtin class.
+ ///
+ /// # Arguments
+ /// - `value`: The target [`BuiltinClasses`] we want to create a definition of.
+ /// - `funcs`: The list of [`FunctionDef`]s that is extended with the definitions of this class' methods.
+ ///
+ /// # Returns
+ /// A new ClassDef that represents the given `value`.
+ #[inline]
+ pub fn from_builtin(value: BuiltinClasses, funcs: &mut Vec<FunctionDef>) -> Self {
+ Self {
+ name: value.name().into(),
+ package: None,
+ version: None,
+ props: value.props().into_iter().map(|(name, dtype)| VarDef { name: (*name).into(), data_type: dtype.clone() }).collect(),
+ methods: value
+ .methods()
+ .into_iter()
+ .map(|(name, sig)| {
+ let i: usize = funcs.len();
+ funcs.push(FunctionDef { name: (*name).into(), args: sig.0.clone(), ret: sig.1.clone() });
+ i
+ })
+ .collect(),
+ }
+ }
+}
diff --git a/specifications/src/working.rs b/specifications/src/working.rs
index c50f7c2e..75232559 100644
--- a/specifications/src/working.rs
+++ b/specifications/src/working.rs
@@ -4,7 +4,7 @@
// Created:
// 06 Jan 2023, 15:01:17
// Last edited:
-// 07 Mar 2024, 11:58:09
+// 02 May 2025, 15:07:33
// Auto updated?
// Yes
//
@@ -21,6 +21,7 @@ use std::sync::Arc;

pub use JobServiceError as Error;
use async_trait::async_trait;
use futures::Stream;
+use policy_reasoner::spec::reasons::ManyReason;
use prost::{Enumeration, Message, Oneof};
use tonic::body::{BoxBody, empty_body};
use tonic::client::Grpc as GrpcClient;
@@ -30,6 +31,8 @@ use tonic::server::{Grpc as GrpcServer, NamedService, ServerStreamingService, Un
use tonic::transport::{Channel, Endpoint};
use tonic::{Code, Request, Response, Status};

+use crate::checking::deliberation::{CheckResponse, CheckTaskRequest, CheckWorkflowRequest, Prost};
+

/***** ERRORS *****/
/// Defines the errors occurring when juggling [`PreprocessKind`]s.
@@ -232,44 +235,6 @@ pub enum TaskStatus {


/***** MESSAGES *****/
-/// Request for checking workflow validity with the worker's checker.
-#[derive(Clone, Message)]
-pub struct CheckWorkflowRequest {
- /// Some identifier telling the worker which use-case (registry) is being used.
- #[prost(tag = "1", required, string)]
- pub use_case: String,
- /// The workflow that should be checked.
- #[prost(tag = "2", required, string)]
- pub workflow: String,
-}
-
-/// Request for checking task validity with the worker's checker.
-#[derive(Clone, Message)]
-pub struct CheckTaskRequest {
- /// Some identifier telling the worker which use-case (registry) is being used.
- #[prost(tag = "1", required, string)]
- pub use_case: String,
- /// The workflow that should be checked.
- #[prost(tag = "2", required, string)]
- pub workflow: String,
- /// A pointer to the task in the `workflow` that should be specifically permitted.
- #[prost(tag = "3", required, string)]
- pub task_id: String,
-}
-
-/// The reply sent by the worker if a workflow or task is permitted (i.e., as response to [`CheckWorkflowRequest`] or [`CheckTaskRequest`]).
-#[derive(Clone, Message)]
-pub struct CheckReply {
- /// Whether the checker approved or denied
- #[prost(tag = "1", required, bool)]
- pub verdict: bool,
- /// If `verdict` is false, then this _may_ denote a list of reasons for denying it.
- #[prost(tag = "2", repeated, string)]
- pub reasons: Vec<String>,
-}
-
-

/// Request for preprocessing a given dataset.
#[derive(Clone, Message)]
pub struct PreprocessRequest {
@@ -424,7 +389,10 @@ impl JobServiceClient {
///
/// # Errors
/// This function errors if either we failed to send the request or the endpoint itself failed to process it.
- pub async fn check_workflow(&mut self, request: impl tonic::IntoRequest<CheckWorkflowRequest>) -> Result<Response<CheckReply>, Status> {
+ pub async fn check_workflow(
+ &mut self,
+ request: impl tonic::IntoRequest<Prost<CheckWorkflowRequest>>,
+ ) -> Result<Response<Prost<CheckResponse<ManyReason<String>>>>, Status> {
// Assert the client is ready to get the party started
if let Err(err) = self.client.ready().await {
return Err(Status::new(Code::Unknown, format!("Service was not ready: {err}")));
@@ -446,7 +414,10 @@ impl JobServiceClient {
///
/// # Errors
/// This function errors if either we failed to send the request or the endpoint itself failed to process it.
- pub async fn check_task(&mut self, request: impl tonic::IntoRequest<CheckTaskRequest>) -> Result<Response<CheckReply>, Status> {
+ pub async fn check_task(
+ &mut self,
+ request: impl tonic::IntoRequest<Prost<CheckTaskRequest>>,
+ ) -> Result<Response<Prost<CheckResponse<ManyReason<String>>>>, Status> {
// Assert the client is ready to get the party started
if let Err(err) = self.client.ready().await {
return Err(Status::new(Code::Unknown, format!("Service was not ready: {err}")));
@@ -547,7 +518,10 @@ pub trait JobService: 'static + Send + Sync {
///
/// # Errors
/// This function may error (i.e., send back a `tonic::Status`) whenever it fails.
- async fn check_workflow(&self, request: Request<CheckWorkflowRequest>) -> Result<Response<CheckReply>, Status>;
+ async fn check_workflow(
+ &self,
+ request: Request<Prost<CheckWorkflowRequest>>,
+ ) -> Result<Response<Prost<CheckResponse<ManyReason<String>>>>, Status>;

/// Handle for when a [`CheckTaskRequest`] comes in.
///
@@ -559,7 +533,7 @@ pub trait JobService: 'static + Send + Sync {
///
/// # Errors
/// This function may error (i.e., send back a `tonic::Status`) whenever it fails.
- async fn check_task(&self, request: Request<CheckTaskRequest>) -> Result<Response<CheckReply>, Status>;
+ async fn check_task(&self, request: Request<Prost<CheckTaskRequest>>) -> Result<Response<Prost<CheckResponse<ManyReason<String>>>>, Status>;

/// Handle for when a PreprocessRequest comes in.
///
@@ -636,11 +610,11 @@ where
"/job.JobService/CheckWorkflow" => {
/// Helper struct for the given [`JobService`] that focusses specifically on this request.
struct CheckWorkflowSvc<T: JobService>(Arc<T>);
- impl<T: JobService> UnaryService<CheckWorkflowRequest> for CheckWorkflowSvc<T> {
+ impl<T: JobService> UnaryService<Prost<CheckWorkflowRequest>> for CheckWorkflowSvc<T> {
type Future = BoxFuture<Response<Self::Response>, Status>;
- type Response = CheckReply;
+ type Response = Prost<CheckResponse<ManyReason<String>>>;

- fn call(&mut self, req: Request<CheckWorkflowRequest>) -> Self::Future {
+ fn call(&mut self, req: Request<Prost<CheckWorkflowRequest>>) -> Self::Future {
// Return the service function as the future to run
let service = self.0.clone();
let fut = async move { (*service).check_workflow(req).await };
@@ -662,11 +636,11 @@ where
"/job.JobService/CheckTask" => {
/// Helper struct for the given [`JobService`] that focusses specifically on this request.
struct CheckTaskSvc<T: JobService>(Arc<T>);
- impl<T: JobService> UnaryService<CheckTaskRequest> for CheckTaskSvc<T> {
+ impl<T: JobService> UnaryService<Prost<CheckTaskRequest>> for CheckTaskSvc<T> {
type Future = BoxFuture<Response<Self::Response>, Status>;
- type Response = CheckReply;
+ type Response = Prost<CheckResponse<ManyReason<String>>>;

- fn call(&mut self, req: Request<CheckTaskRequest>) -> Self::Future {
+ fn call(&mut self, req: Request<Prost<CheckTaskRequest>>) -> Self::Future {
// Return the service function as the future to run
let service = self.0.clone();
let fut = async move { (*service).check_task(req).await };
diff --git a/tests/eflint/tautology.eflint b/tests/eflint/tautology.eflint
index 6fb71765..69d2639c 100644
--- a/tests/eflint/tautology.eflint
+++ b/tests/eflint/tautology.eflint
@@ -4,7 +4,7 @@
// Created:
// 16 Jan 2024, 13:30:31
// Last edited:
-// 16 Jan 2024, 13:30:47
+// 01 May 2025, 17:20:00
// Auto updated?
// Yes // @@ -14,5 +14,8 @@ // -// An Invariant that is physically incapable of violating (poor thing) -Invariant tautology When True. +// We will make sure that all questions that are asked of the reasoner are true. +Extend Fact workflow-to-execute Derived from (Foreach workflow: workflow-to-execute(workflow)). +Extend Fact task-to-execute Derived from (Foreach task: task-to-execute(task)). +Extend Fact dataset-to-transfer Derived from (Foreach node-input: dataset-to-transfer(node-input)). +Extend Fact result-to-transfer Derived from (Foreach workflow-result-recipient: result-to-transfer(workflow-result-recipient)). diff --git a/tests/wir/arrays.json b/tests/wir/arrays.json new file mode 100644 index 00000000..f142a7f0 --- /dev/null +++ b/tests/wir/arrays.json @@ -0,0 +1,716 @@ +{ + "id": "workflow-67QyatpA", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "arr1", + "t": { + "kind": "arr", + "t": { + "kind": "int" + } + } + }, + { + "n": "arr2", + "t": { + "kind": "arr", + "t": { + "kind": "int" + } + } + }, + { + "n": "i", + "t": { + "kind": "int" + } + }, + { + "n": "arr_arr", + "t": { + "kind": "arr", + "t": { + "kind": "arr", + "t": { + "kind": "int" + } + } + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "int", + "v": 2 + }, + { + "kind": "int", + "v": 3 + }, + { + "kind": "int", + "v": 4 + }, + { + "kind": "int", + "v": 5 + }, + { + "kind": "arr", + "l": 5, + "t": { + "kind": "int" + } + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "arx", + "t": { + "kind": "int" + } + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "int", + "v": 10 + }, + { + "kind": "int", + "v": 9 + }, + { + "kind": "int", + "v": 8 + }, + { + "kind": "int", + "v": 7 + }, + { + "kind": "int", + "v": 6 + }, + { + "kind": "int", + "v": 5 + }, + { + "kind": "int", + "v": 4 + }, + { + "kind": "int", + "v": 3 + }, + { + "kind": "int", + "v": 2 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "arr", + "l": 10, + "t": { + "kind": "int" + } + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "vrs", + "d": 2 + } + ], + "n": 3 + }, + { + "kind": "loop", + "c": 4, + "b": 8, + "n": 27 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 2 + } + ], + "n": 5 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 10 + } + ], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], 
+ "n": 7 + }, + { + "kind": "brc", + "t": 8, + "f": 27, + "m": 27 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 2 + } + ], + "n": 9 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + } + ], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 0 + } + ], + "n": 11 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 12 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": ") " + } + ], + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 0 + } + ], + "n": 14 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 15 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 16 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 9 + } + ], + "n": 17 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 2 + } + ], + "n": 18 + }, + { + "kind": "lin", + "i": [ + { + "kind": "sub" + } + ], + "n": 19 + }, + { + "kind": "lin", + "i": [ + { + "kind": "arx", + "t": { + "kind": "int" + } + } + ], + "n": 20 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + } + ], + "n": 21 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 22 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 23 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 2 + } + ], + "n": 24 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 25 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 26 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 2 + } + ], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + }, + { + "kind": "int", + "v": 2 + }, + { + "kind": "int", + "v": 3 + }, + { + "kind": "arr", + "l": 3, + "t": { + "kind": "int" + } + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "arx", + "t": { + "kind": "int" + } + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 28 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 29 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 3 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "int", + "v": 2 + }, + { + "kind": "int", + "v": 3 + }, + { + "kind": "arr", + "l": 3, + "t": { + "kind": "int" + } + }, + { + "kind": "int", + "v": 4 + }, + { + "kind": "int", + "v": 5 + }, + { + "kind": "int", + "v": 6 + }, + { + "kind": "arr", + "l": 3, + "t": { + "kind": "int" + } + }, + { + "kind": "int", + "v": 7 + }, + { + "kind": "int", + "v": 8 + }, + { + "kind": "int", + "v": 9 + }, + { + "kind": "arr", + "l": 3, + "t": { + "kind": "int" + } + }, + { + "kind": "arr", + "l": 3, + "t": { + "kind": "arr", + "t": { + "kind": "int" + } + } + }, + { + "kind": "vrs", + "d": 3 + }, + { + "kind": "vrg", + "d": 3 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "arx", + "t": { + "kind": "arr", + "t": { + "kind": "int" + } + } + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 30 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 31 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 3 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "arx", + "t": { + "kind": "arr", + "t": { + "kind": "int" + } + } + }, + { + "kind": "int", + "v": 2 + }, + { + "kind": "arx", + "t": { + "kind": "int" + } + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 32 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 33 + 
}, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/attributes.json b/tests/wir/attributes.json new file mode 100644 index 00000000..12bf9a50 --- /dev/null +++ b/tests/wir/attributes.json @@ -0,0 +1,562 @@ +{ + "id": "workflow-Jwm4Zdew", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "hello_world", + "v": "1.0.0", + "d": { + "n": "hello_world", + "a": [], + "r": { + "kind": "str" + } + }, + "a": [], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [ + "cho.baz" + ], + "user": null, + "graph": [ + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "amy.foo", + "dan.qux" + ], + "n": 1 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 2 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 3 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "bob.bar", + "dan.qux" + ], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 6 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "bob.bar", + "dan.qux" + ], + "n": 7 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 8 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 9 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "bob.bar" + ], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 11 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 12 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "bob.bar" + ], + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 14 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 15 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "bob.bar" + ], + "n": 16 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 17 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 18 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "bob.bar" + ], + "n": 19 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 20 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 21 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux" + ], + "n": 22 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 23 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 24 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + 
"r": null, + "m": [ + "dan.qux" + ], + "n": 25 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 26 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 27 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "eve.quux" + ], + "n": 28 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Hello, world!" + }, + { + "kind": "eq" + } + ], + "n": 29 + }, + { + "kind": "brc", + "t": 30, + "f": 33, + "m": 36 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "eve.quux" + ], + "n": 31 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 32 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 36 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "eve.quux" + ], + "n": 34 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 35 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 36 + }, + { + "kind": "lin", + "i": [], + "n": 37 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux" + ], + "n": 38 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Hello, world!" + }, + { + "kind": "eq" + } + ], + "n": 39 + }, + { + "kind": "brc", + "t": 40, + "f": 43, + "m": 46 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux", + "eve.quux" + ], + "n": 41 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 42 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 46 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [ + "dan.qux" + ], + "n": 44 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 45 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 46 + }, + { + "kind": "lin", + "i": [], + "n": 47 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/average.json b/tests/wir/average.json new file mode 100644 index 00000000..0c59292d --- /dev/null +++ b/tests/wir/average.json @@ -0,0 +1,235 @@ +{ + "id": "workflow-VNprWbCF", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "average", + "v": "1.0.0", + "d": { + "n": "average", + "a": [ + { + "kind": "res" + } + ], + "r": { + "kind": "real" + } + }, + "a": [ + "dataset" + ], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "numbers" + }, + { + "kind": "ins", + "d": 0 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 1 + }, + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "Amy" + ] + 
}, + "s": null, + "i": { + "{\"Data\":\"numbers\"}": null + }, + "r": null, + "m": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 3 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "numbers" + }, + { + "kind": "ins", + "d": 0 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 5 + }, + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "Dan" + ] + }, + "s": null, + "i": { + "{\"Data\":\"numbers\"}": null + }, + "r": null, + "m": [], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/call.json b/tests/wir/call.json new file mode 100644 index 00000000..18dd45a2 --- /dev/null +++ b/tests/wir/call.json @@ -0,0 +1,204 @@ +{ + "id": "workflow-FziewPZK", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + }, + { + "n": "test", + "a": [], + "r": { + "kind": "void" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Hello there!" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "General \" Kenobi,\n\tyou are a bold one!" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 3 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 4 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 4 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "stp" + } + ], + "funcs": { + "4": [ + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "General Kenobi!!!" 
+ }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "ret", + "r": [] + } + ] + } +} diff --git a/tests/wir/class.json b/tests/wir/class.json new file mode 100644 index 00000000..295b653e --- /dev/null +++ b/tests/wir/class.json @@ -0,0 +1,285 @@ +{ + "id": "workflow-1CJqFFYO", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + }, + { + "n": "swoosh", + "a": [ + { + "kind": "clss", + "n": "Jedi" + } + ], + "r": { + "kind": "void" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "Jedi", + "i": null, + "v": null, + "p": [ + { + "n": "is_master", + "t": { + "kind": "bool" + } + }, + { + "n": "lightsaber_colour", + "t": { + "kind": "str" + } + }, + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [ + 4 + ] + } + ], + "vars": [ + { + "n": "self", + "t": { + "kind": "clss", + "n": "Jedi" + } + }, + { + "n": "obi_wan", + "t": { + "kind": "clss", + "n": "Jedi" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "bol", + "v": true + }, + { + "kind": "str", + "v": "blue" + }, + { + "kind": "str", + "v": "Obi-Wan Kenobi" + }, + { + "kind": "ins", + "d": 2 + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "mpp" + }, + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "prj", + "f": "swoosh" + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "dpp" + } + ], + "n": 3 + }, + { + "kind": "stp" + } + ], + "funcs": { + "4": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "prj", + "f": "name" + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "str", + "v": " is swinging their " + }, + { + "kind": "add" + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "prj", + "f": "lightsaber_colour" + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "add" + }, + { + "kind": "str", + "v": " lightsaber!" 
+ }, + { + "kind": "add" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "ret", + "r": [] + } + ] + } +} diff --git a/tests/wir/comments.json b/tests/wir/comments.json new file mode 100644 index 00000000..bb80e62c --- /dev/null +++ b/tests/wir/comments.json @@ -0,0 +1,98 @@ +{ + "id": "workflow-ShQ4Y0Am", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/cutoff.json b/tests/wir/cutoff.json new file mode 100644 index 00000000..080d6242 --- /dev/null +++ b/tests/wir/cutoff.json @@ -0,0 +1,589 @@ +{ + "id": "workflow-RqDtlTS8", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + }, + { + "n": "hello", + "a": [], + "r": { + "kind": "void" + } + }, + { + "n": "hel", + "a": [ + { + "kind": "any" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "insert", + "a": [ + { + "kind": "any" + } + ], + "r": { + "kind": "void" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "i", + "t": { + "kind": "int" + } + }, + { + "n": "a", + "t": { + "kind": "any" + } + }, + { + "n": "a", + "t": { + "kind": "any" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "vrs", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "loop", + "c": 2, + "b": 6, + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 10 + } + ], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 5 + }, + { + "kind": "brc", + "t": 6, + "f": 13, + "m": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "This should be a while loop" + } + ], + "n": 7 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 8 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 9 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + 
"n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 11 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 12 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 0 + } + ], + "n": 2 + }, + { + "kind": "lin", + "i": [], + "n": 14 + }, + { + "kind": "ret", + "r": [] + } + ], + "funcs": { + "5": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "str", + "v": "H" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "bol", + "v": true + }, + { + "kind": "eq" + } + ], + "n": 3 + }, + { + "kind": "brc", + "t": 4, + "f": 7, + "m": 8 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "e" + } + ], + "n": 5 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 0 + } + ], + "n": 6 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "l" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 9 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "bol", + "v": true + }, + { + "kind": "eq" + } + ], + "n": 11 + }, + { + "kind": "brc", + "t": 12, + "f": 13, + "m": null + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "ret", + "r": [] + } + ], + "6": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "vrs", + "d": 2 + }, + { + "kind": "vrg", + "d": 2 + }, + { + "kind": "bol", + "v": true + }, + { + "kind": "eq" + } + ], + "n": 1 + }, + { + "kind": "brc", + "t": 2, + "f": 3, + "m": 6 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "A return statement should be inserted below this one" + } + ], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 0 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 6 + }, + { + "kind": "lin", + "i": [], + "n": 7 + }, + { + "kind": "ret", + "r": [] + } + ], + "4": [ + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "H" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "e" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "l" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "l" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "o" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 9 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 10 + }, + { + "kind": "ret", + "r": [] + } + ] + } +} diff --git a/tests/wir/data.json b/tests/wir/data.json new file mode 100644 index 00000000..2f7fc074 --- /dev/null +++ b/tests/wir/data.json @@ -0,0 +1,384 @@ +{ + "id": "workflow-YiM40Idd", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, 
+ { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "data_test", + "v": "1.0.0", + "d": { + "n": "run_script", + "a": [ + { + "kind": "res" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "dataset" + ], + "r": [] + }, + { + "kind": "cmp", + "p": "data_test", + "v": "1.0.0", + "d": { + "n": "aggregate", + "a": [ + { + "kind": "res" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "local1", + "local2" + ], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "ect_umc", + "t": { + "kind": "data" + } + }, + { + "n": "ect_sta", + "t": { + "kind": "data" + } + }, + { + "n": "local", + "t": { + "kind": "arr", + "t": { + "kind": "res" + } + } + }, + { + "n": "res", + "t": { + "kind": "res" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "str", + "v": "Test" + }, + { + "kind": "ins", + "d": 0 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "str", + "v": "Test" + }, + { + "kind": "ins", + "d": 0 + }, + { + "kind": "vrs", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "par", + "b": [ + 2, + 6 + ], + "m": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 4 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"Test\"}": null + }, + "r": "result_run_script_d2f46f", + "m": [], + "n": 5 + }, + { + "kind": "ret", + "r": [ + { + "IntermediateResult": "result_run_script_d2f46f" + } + ] + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 7 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 8 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"Test\"}": null + }, + "r": "result_run_script_0d0423", + "m": [], + "n": 9 + }, + { + "kind": "ret", + "r": [ + { + "IntermediateResult": "result_run_script_0d0423" + } + ] + }, + { + "kind": "join", + "m": "All", + "n": 11 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "vrs", + "d": 2 + }, + { + "kind": "vrd", + "d": 3 + }, + { + "kind": "vrg", + "d": 2 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "arx", + "t": { + "kind": "res" + } + }, + { + "kind": "vrg", + "d": 2 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "arx", + "t": { + "kind": "res" + } + } + ], + "n": 12 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"IntermediateResult\":\"result_run_script_d2f46f\"}": null, + "{\"IntermediateResult\":\"result_run_script_0d0423\"}": null + }, + "r": "result_aggregate_53762c", + "m": [], + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 3 + }, + { + "kind": "vrg", + "d": 3 + } + ], + "n": 14 + }, + { + "kind": "ret", + "r": [ + { + "IntermediateResult": "result_aggregate_53762c" + } + ] + } + ], + 
"funcs": {} +} diff --git a/tests/wir/data_complex.json b/tests/wir/data_complex.json new file mode 100644 index 00000000..d18eda85 --- /dev/null +++ b/tests/wir/data_complex.json @@ -0,0 +1,1274 @@ +{ + "id": "workflow-uEZ3baXS", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "copy_result", + "v": "1.1.0", + "d": { + "n": "copy_result", + "a": [ + { + "kind": "res" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "result" + ], + "r": [] + }, + { + "kind": "cmp", + "p": "data_test", + "v": "1.0.0", + "d": { + "n": "aggregate", + "a": [ + { + "kind": "res" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "local1", + "local2" + ], + "r": [] + }, + { + "kind": "cmp", + "p": "data_test", + "v": "1.0.0", + "d": { + "n": "run_script", + "a": [ + { + "kind": "res" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "dataset" + ], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "data0", + "t": { + "kind": "data" + } + }, + { + "n": "res1", + "t": { + "kind": "res" + } + }, + { + "n": "gres1", + "t": { + "kind": "res" + } + }, + { + "n": "data1", + "t": { + "kind": "data" + } + }, + { + "n": "res2", + "t": { + "kind": "res" + } + }, + { + "n": "gres2", + "t": { + "kind": "res" + } + }, + { + "n": "data2", + "t": { + "kind": "data" + } + }, + { + "n": "gres3", + "t": { + "kind": "res" + } + }, + { + "n": "i", + "t": { + "kind": "int" + } + }, + { + "n": "res3", + "t": { + "kind": "res" + } + }, + { + "n": "data3", + "t": { + "kind": "data" + } + }, + { + "n": "gres4", + "t": { + "kind": "res" + } + }, + { + "n": "i", + "t": { + "kind": "int" + } + }, + { + "n": "res4", + "t": { + "kind": "res" + } + }, + { + "n": "data4", + "t": { + "kind": "data" + } + }, + { + "n": "data12", + "t": { + "kind": "res" + } + }, + { + "n": "data34", + "t": { + "kind": "res" + } + }, + { + "n": "data1234", + "t": { + "kind": "res" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "str", + "v": "Test" + }, + { + "kind": "ins", + "d": 0 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 1 + }, + { + "kind": "nod", + "t": 2, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"Test\"}": null + }, + "r": "result_run_script_ea9799", + "m": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "vrg", + "d": 1 + } + ], + "n": 3 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"IntermediateResult\":\"result_run_script_ea9799\"}": null + }, + "r": "result_aggregate_fc0e96", + "m": [], + "n": 4 + }, + { 
+ "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 2 + }, + { + "kind": "vrd", + "d": 3 + }, + { + "kind": "str", + "v": "test_data_1" + }, + { + "kind": "vrg", + "d": 2 + }, + { + "kind": "fnc", + "d": 3 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [ + { + "IntermediateResult": "result_aggregate_fc0e96" + } + ], + "r": [ + { + "Data": "test_data_1" + } + ], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 3 + }, + { + "kind": "vrd", + "d": 4 + }, + { + "kind": "int", + "v": 42 + }, + { + "kind": "int", + "v": 32 + }, + { + "kind": "eq" + } + ], + "n": 7 + }, + { + "kind": "brc", + "t": 8, + "f": 12, + "m": 16 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 3 + } + ], + "n": 9 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 10 + }, + { + "kind": "nod", + "t": 2, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"test_data_1\"}": null + }, + "r": "result_run_script_244f30", + "m": [], + "n": 11 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 4 + } + ], + "n": 16 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 3 + } + ], + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 14 + }, + { + "kind": "nod", + "t": 2, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"test_data_1\"}": null + }, + "r": "result_run_script_c20212", + "m": [], + "n": 15 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 4 + } + ], + "n": 16 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 5 + }, + { + "kind": "vrg", + "d": 4 + }, + { + "kind": "vrg", + "d": 4 + } + ], + "n": 17 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"IntermediateResult\":\"result_run_script_244f30\"}": null, + "{\"IntermediateResult\":\"result_run_script_c20212\"}": null + }, + "r": "result_aggregate_00f051", + "m": [], + "n": 18 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 5 + }, + { + "kind": "vrd", + "d": 6 + }, + { + "kind": "str", + "v": "test_data_2" + }, + { + "kind": "vrg", + "d": 5 + }, + { + "kind": "fnc", + "d": 3 + } + ], + "n": 19 + }, + { + "kind": "cll", + "i": [ + { + "IntermediateResult": "result_aggregate_00f051" + } + ], + "r": [ + { + "Data": "test_data_2" + } + ], + "n": 20 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 6 + }, + { + "kind": "vrd", + "d": 7 + }, + { + "kind": "vrd", + "d": 8 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "vrs", + "d": 8 + } + ], + "n": 21 + }, + { + "kind": "loop", + "c": 22, + "b": 26, + "n": 40 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 8 + } + ], + "n": 23 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 10 + } + ], + "n": 24 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 25 + }, + { + "kind": "brc", + "t": 26, + "f": 40, + "m": 40 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 9 + } + ], + "n": 27 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 6 + } + ], + "n": 28 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 29 + }, + { + "kind": "nod", + "t": 2, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"test_data_2\"}": null + }, + "r": "result_run_script_ad9678", + "m": [], + "n": 30 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 9 + } + ], + "n": 31 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 9 + } + ], + "n": 32 + }, + { + "kind": 
"lin", + "i": [ + { + "kind": "vrg", + "d": 9 + } + ], + "n": 33 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"IntermediateResult\":\"result_run_script_ad9678\"}": null + }, + "r": "result_aggregate_0c3097", + "m": [], + "n": 34 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 7 + } + ], + "n": 35 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 8 + } + ], + "n": 36 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 37 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 38 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 8 + } + ], + "n": 39 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vru", + "d": 9 + } + ], + "n": 22 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 10 + }, + { + "kind": "str", + "v": "test_data_3" + }, + { + "kind": "vrg", + "d": 7 + }, + { + "kind": "fnc", + "d": 3 + } + ], + "n": 41 + }, + { + "kind": "cll", + "i": [ + { + "IntermediateResult": "result_aggregate_0c3097" + } + ], + "r": [ + { + "Data": "test_data_3" + } + ], + "n": 42 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 10 + }, + { + "kind": "vrd", + "d": 11 + }, + { + "kind": "vrg", + "d": 10 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 43 + }, + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"test_data_3\"}": null + }, + "r": "result_copy_result_822146", + "m": [], + "n": 44 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 11 + }, + { + "kind": "vrd", + "d": 12 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "vrs", + "d": 12 + } + ], + "n": 45 + }, + { + "kind": "loop", + "c": 46, + "b": 50, + "n": 63 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 12 + } + ], + "n": 47 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 10 + } + ], + "n": 48 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 49 + }, + { + "kind": "brc", + "t": 50, + "f": 63, + "m": 63 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 13 + } + ], + "n": 51 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 11 + } + ], + "n": 52 + }, + { + "kind": "nod", + "t": 2, + "l": "all", + "s": null, + "i": { + "{\"IntermediateResult\":\"result_copy_result_822146\"}": null, + "{\"IntermediateResult\":\"result_aggregate_28e9be\"}": null + }, + "r": "result_run_script_78b2bf", + "m": [], + "n": 53 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 13 + } + ], + "n": 54 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 13 + } + ], + "n": 55 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 13 + } + ], + "n": 56 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"IntermediateResult\":\"result_run_script_78b2bf\"}": null + }, + "r": "result_aggregate_28e9be", + "m": [], + "n": 57 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 11 + } + ], + "n": 58 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 12 + } + ], + "n": 59 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 60 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 61 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 12 + } + ], + "n": 62 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vru", + "d": 13 + } + ], + "n": 46 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 14 + }, + { + "kind": "str", + "v": "test_data_4" + }, + { + 
"kind": "vrg", + "d": 11 + }, + { + "kind": "fnc", + "d": 3 + } + ], + "n": 64 + }, + { + "kind": "cll", + "i": [ + { + "IntermediateResult": "result_aggregate_28e9be" + }, + { + "IntermediateResult": "result_copy_result_822146" + } + ], + "r": [ + { + "Data": "test_data_4" + } + ], + "n": 65 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 14 + }, + { + "kind": "vrd", + "d": 15 + }, + { + "kind": "vrg", + "d": 3 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + }, + { + "kind": "vrg", + "d": 6 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 66 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"test_data_2\"}": null, + "{\"Data\":\"test_data_1\"}": null + }, + "r": "result_aggregate_3ace7c", + "m": [], + "n": 67 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 15 + }, + { + "kind": "vrd", + "d": 16 + }, + { + "kind": "vrg", + "d": 10 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + }, + { + "kind": "vrg", + "d": 14 + }, + { + "kind": "cst", + "t": { + "kind": "res" + } + } + ], + "n": 68 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"test_data_4\"}": null, + "{\"Data\":\"test_data_3\"}": null + }, + "r": "result_aggregate_b49d89", + "m": [], + "n": 69 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 16 + }, + { + "kind": "vrd", + "d": 17 + }, + { + "kind": "vrg", + "d": 15 + }, + { + "kind": "vrg", + "d": 16 + } + ], + "n": 70 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"IntermediateResult\":\"result_aggregate_b49d89\"}": null, + "{\"IntermediateResult\":\"result_aggregate_3ace7c\"}": null + }, + "r": "result_aggregate_4b2885", + "m": [], + "n": 71 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 17 + }, + { + "kind": "str", + "v": "test_data_1234" + }, + { + "kind": "vrg", + "d": 17 + }, + { + "kind": "fnc", + "d": 3 + } + ], + "n": 72 + }, + { + "kind": "cll", + "i": [ + { + "IntermediateResult": "result_aggregate_4b2885" + } + ], + "r": [ + { + "Data": "test_data_1234" + } + ], + "n": 73 + }, + { + "kind": "ret", + "r": [ + { + "Data": "test_data_1234" + } + ] + } + ], + "funcs": {} +} diff --git a/tests/wir/delayed_initialization.json b/tests/wir/delayed_initialization.json new file mode 100644 index 00000000..648ca450 --- /dev/null +++ b/tests/wir/delayed_initialization.json @@ -0,0 +1,143 @@ +{ + "id": "workflow-imsiaQvm", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "test", + "t": { + "kind": "int" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "int", + "v": 42 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + }, + { + 
"kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/eflint/README.md b/tests/wir/eflint/README.md new file mode 100644 index 00000000..0c44091e --- /dev/null +++ b/tests/wir/eflint/README.md @@ -0,0 +1,13 @@ +# eFLINT Test Cases for WIR -> eFLINT compiler + +This folder contains the "golden" answers for the compilation from WIR files to eFLINT. + +Note that, typically, the compilation from WIR -> Workflow (which happens first) is very lossy; only interesting things like branches, loops and task calls are kept. + +Further, this process attempts to eliminate any BraneScript function calls, as these are not supported by the Workflow. + +As such, the following WIR files are not supported: +- `class.json` is not supported because the analysis can't extract the called function from projection; and +- `recursion.json` because recursive functions cannot be inlined. + +The first one can be solved by implementing a more complex analysis algorithm that extracts called function IDs based on stack emulation instead of the "looking at the previous instruction and guessing"-kind of analysis. But that's a TODO. diff --git a/tests/wir/eflint/arrays.eflint b/tests/wir/eflint/arrays.eflint new file mode 100644 index 00000000..4f491989 --- /dev/null +++ b/tests/wir/eflint/arrays.eflint @@ -0,0 +1,3 @@ ++workflow("workflow-67QyatpA"). ++node(workflow("workflow-67QyatpA"), "workflow-67QyatpA-3Vq5-loop"). ++loop(node(workflow("workflow-67QyatpA"), "workflow-67QyatpA-3Vq5-loop")). diff --git a/tests/wir/eflint/attributes.eflint b/tests/wir/eflint/attributes.eflint new file mode 100644 index 00000000..827ec6fc --- /dev/null +++ b/tests/wir/eflint/attributes.eflint @@ -0,0 +1,154 @@ ++workflow("workflow-Jwm4Zdew"). ++tag("cho:baz"). ++signature(user(""), ""). ++metadata(tag("cho:baz"), signature(user(""), "")). ++workflow-metadata(workflow("workflow-Jwm4Zdew"), metadata(tag("cho:baz"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:0-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:0-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:0-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:0-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++tag("amy:foo"). ++signature(user(""), ""). ++metadata(tag("amy:foo"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:0-task"), metadata(tag("amy:foo"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:3-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:3-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:3-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("bob:bar"). ++signature(user(""), ""). ++metadata(tag("bob:bar"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:3-task"), metadata(tag("bob:bar"), signature(user(""), ""))). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:3-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:6-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:6-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:6-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("bob:bar"). ++signature(user(""), ""). ++metadata(tag("bob:bar"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:6-task"), metadata(tag("bob:bar"), signature(user(""), ""))). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:6-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:9-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:9-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:9-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("bob:bar"). ++signature(user(""), ""). ++metadata(tag("bob:bar"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:9-task"), metadata(tag("bob:bar"), signature(user(""), ""))). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:9-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:12-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:12-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:12-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("bob:bar"). ++signature(user(""), ""). ++metadata(tag("bob:bar"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:12-task"), metadata(tag("bob:bar"), signature(user(""), ""))). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:12-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:15-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:15-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:15-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("bob:bar"). ++signature(user(""), ""). ++metadata(tag("bob:bar"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:15-task"), metadata(tag("bob:bar"), signature(user(""), ""))). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:15-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:18-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:18-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:18-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("bob:bar"). ++signature(user(""), ""). ++metadata(tag("bob:bar"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:18-task"), metadata(tag("bob:bar"), signature(user(""), ""))). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:18-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:21-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:21-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:21-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:21-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:24-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:24-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:24-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:24-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:27-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:27-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:27-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:27-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++tag("eve:quux"). ++signature(user(""), ""). ++metadata(tag("eve:quux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:27-task"), metadata(tag("eve:quux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:30-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:30-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:30-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:30-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++tag("eve:quux"). ++signature(user(""), ""). ++metadata(tag("eve:quux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:30-task"), metadata(tag("eve:quux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:33-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:33-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:33-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("eve:quux"). ++signature(user(""), ""). ++metadata(tag("eve:quux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:33-task"), metadata(tag("eve:quux"), signature(user(""), ""))). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:33-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:37-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:37-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:37-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:37-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:40-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:40-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:40-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:40-task"), metadata(tag("dan:qux"), signature(user(""), ""))). ++tag("eve:quux"). ++signature(user(""), ""). ++metadata(tag("eve:quux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:40-task"), metadata(tag("eve:quux"), signature(user(""), ""))). ++node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:43-task"). ++node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:43-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:43-task"), asset("hello_world[1.0.0]")), "hello_world"). ++tag("dan:qux"). ++signature(user(""), ""). ++metadata(tag("dan:qux"), signature(user(""), "")). ++node-metadata(node(workflow("workflow-Jwm4Zdew"), "workflow-Jwm4Zdew-
:43-task"), metadata(tag("dan:qux"), signature(user(""), ""))). diff --git a/tests/wir/eflint/average.eflint b/tests/wir/eflint/average.eflint new file mode 100644 index 00000000..6decd711 --- /dev/null +++ b/tests/wir/eflint/average.eflint @@ -0,0 +1,9 @@ ++workflow("workflow-VNprWbCF"). ++node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:1-task"). ++node-input(node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:1-task"), asset("average[1.0.0]")). ++function(node-input(node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:1-task"), asset("average[1.0.0]")), "average"). ++node-input(node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:1-task"), asset("numbers")). ++node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:5-task"). ++node-input(node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:5-task"), asset("average[1.0.0]")). ++function(node-input(node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:5-task"), asset("average[1.0.0]")), "average"). ++node-input(node(workflow("workflow-VNprWbCF"), "workflow-VNprWbCF-
:5-task"), asset("numbers")). diff --git a/tests/wir/eflint/call.eflint b/tests/wir/eflint/call.eflint new file mode 100644 index 00000000..b8eb5e03 --- /dev/null +++ b/tests/wir/eflint/call.eflint @@ -0,0 +1 @@ ++workflow("workflow-FziewPZK"). diff --git a/tests/wir/eflint/comments.eflint b/tests/wir/eflint/comments.eflint new file mode 100644 index 00000000..da101a01 --- /dev/null +++ b/tests/wir/eflint/comments.eflint @@ -0,0 +1 @@ ++workflow("workflow-ShQ4Y0Am"). diff --git a/tests/wir/eflint/cutoff.eflint b/tests/wir/eflint/cutoff.eflint new file mode 100644 index 00000000..29a3d9a0 --- /dev/null +++ b/tests/wir/eflint/cutoff.eflint @@ -0,0 +1,4 @@ ++workflow("workflow-RqDtlTS8"). ++node(workflow("workflow-RqDtlTS8"), "workflow-RqDtlTS8-gM7D-loop"). ++loop(node(workflow("workflow-RqDtlTS8"), "workflow-RqDtlTS8-gM7D-loop")). ++node(workflow("workflow-RqDtlTS8"), "workflow-RqDtlTS8-
:14-return"). diff --git a/tests/wir/eflint/data.eflint b/tests/wir/eflint/data.eflint new file mode 100644 index 00000000..bb0acf1d --- /dev/null +++ b/tests/wir/eflint/data.eflint @@ -0,0 +1,20 @@ ++workflow("workflow-YiM40Idd"). ++node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:4-task"). ++node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:4-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:4-task"), asset("data_test[1.0.0]")), "run_script"). ++node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:4-task"), asset("Test")). ++node-output(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:4-task"), asset("result_run_script_d2f46f")). ++node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:5-return"). ++node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:8-task"). ++node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:8-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:8-task"), asset("data_test[1.0.0]")), "run_script"). ++node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:8-task"), asset("Test")). ++node-output(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:8-task"), asset("result_run_script_0d0423")). ++node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:9-return"). ++node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:12-task"). ++node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:12-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:12-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:12-task"), asset("result_run_script_d2f46f")). ++node-input(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:12-task"), asset("result_run_script_0d0423")). ++node-output(node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:12-task"), asset("result_aggregate_53762c")). ++node(workflow("workflow-YiM40Idd"), "workflow-YiM40Idd-
:14-return"). diff --git a/tests/wir/eflint/data_complex.eflint b/tests/wir/eflint/data_complex.eflint new file mode 100644 index 00000000..9c7d69c5 --- /dev/null +++ b/tests/wir/eflint/data_complex.eflint @@ -0,0 +1,114 @@ ++workflow("workflow-uEZ3baXS"). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:1-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:1-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:1-task"), asset("data_test[1.0.0]")), "run_script"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:1-task"), asset("Test")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:1-task"), asset("result_run_script_ea9799")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:3-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:3-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:3-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:3-task"), asset("result_run_script_ea9799")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:3-task"), asset("result_aggregate_fc0e96")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:5-commit"). ++commit(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:5-commit")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:5-commit"), asset("__brane_internals")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:5-commit"), asset("__brane_internals")), "commit"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:5-commit"), asset("result_aggregate_fc0e96")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:5-commit"), asset("test_data_1")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:10-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:10-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:10-task"), asset("data_test[1.0.0]")), "run_script"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:10-task"), asset("test_data_1")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:10-task"), asset("result_run_script_244f30")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:14-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:14-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:14-task"), asset("data_test[1.0.0]")), "run_script"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:14-task"), asset("test_data_1")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:14-task"), asset("result_run_script_c20212")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:17-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:17-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:17-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:17-task"), asset("result_run_script_244f30")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:17-task"), asset("result_run_script_c20212")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:17-task"), asset("result_aggregate_00f051")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:19-commit"). ++commit(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:19-commit")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:19-commit"), asset("__brane_internals")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:19-commit"), asset("__brane_internals")), "commit"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:19-commit"), asset("result_aggregate_00f051")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:19-commit"), asset("test_data_2")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:29-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:29-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:29-task"), asset("data_test[1.0.0]")), "run_script"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:29-task"), asset("test_data_2")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:29-task"), asset("result_run_script_ad9678")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:33-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:33-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:33-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:33-task"), asset("result_run_script_ad9678")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:33-task"), asset("result_aggregate_0c3097")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-yzpR-loop"). ++loop(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-yzpR-loop")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-yzpR-loop"), asset("test_data_2")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-yzpR-loop"), asset("result_aggregate_0c3097")). ++loop-body(loop(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-yzpR-loop")), node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:29-task")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:41-commit"). ++commit(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:41-commit")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:41-commit"), asset("__brane_internals")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:41-commit"), asset("__brane_internals")), "commit"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:41-commit"), asset("result_aggregate_0c3097")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:41-commit"), asset("test_data_3")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:43-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:43-task"), asset("copy_result[1.1.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:43-task"), asset("copy_result[1.1.0]")), "copy_result"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:43-task"), asset("test_data_3")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:43-task"), asset("result_copy_result_822146")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:52-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:52-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:52-task"), asset("data_test[1.0.0]")), "run_script"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:52-task"), asset("result_copy_result_822146")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:52-task"), asset("result_aggregate_28e9be")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:52-task"), asset("result_run_script_78b2bf")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:56-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:56-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:56-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:56-task"), asset("result_run_script_78b2bf")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:56-task"), asset("result_aggregate_28e9be")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-7wrX-loop"). ++loop(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-7wrX-loop")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-7wrX-loop"), asset("result_aggregate_28e9be")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-7wrX-loop"), asset("result_copy_result_822146")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-7wrX-loop"), asset("result_aggregate_28e9be")). ++loop-body(loop(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-7wrX-loop")), node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:52-task")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:64-commit"). ++commit(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:64-commit")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:64-commit"), asset("__brane_internals")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:64-commit"), asset("__brane_internals")), "commit"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:64-commit"), asset("result_aggregate_28e9be")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:64-commit"), asset("result_copy_result_822146")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:64-commit"), asset("test_data_4")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:66-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:66-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:66-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:66-task"), asset("test_data_2")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:66-task"), asset("test_data_1")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:66-task"), asset("result_aggregate_3ace7c")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:68-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:68-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:68-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:68-task"), asset("test_data_4")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:68-task"), asset("test_data_3")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:68-task"), asset("result_aggregate_b49d89")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:70-task"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:70-task"), asset("data_test[1.0.0]")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:70-task"), asset("data_test[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:70-task"), asset("result_aggregate_3ace7c")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:70-task"), asset("result_aggregate_b49d89")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:70-task"), asset("result_aggregate_4b2885")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:72-commit"). ++commit(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:72-commit")). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:72-commit"), asset("__brane_internals")). ++function(node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:72-commit"), asset("__brane_internals")), "commit"). ++node-input(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:72-commit"), asset("result_aggregate_4b2885")). ++node-output(node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:72-commit"), asset("test_data_1234")). ++node(workflow("workflow-uEZ3baXS"), "workflow-uEZ3baXS-
:73-return"). diff --git a/tests/wir/eflint/delayed_initialization.eflint b/tests/wir/eflint/delayed_initialization.eflint new file mode 100644 index 00000000..4b79161c --- /dev/null +++ b/tests/wir/eflint/delayed_initialization.eflint @@ -0,0 +1 @@ ++workflow("workflow-imsiaQvm"). diff --git a/tests/wir/eflint/empty.eflint b/tests/wir/eflint/empty.eflint new file mode 100644 index 00000000..d337c2c8 --- /dev/null +++ b/tests/wir/eflint/empty.eflint @@ -0,0 +1 @@ ++workflow("workflow-TAx9GXQW"). diff --git a/tests/wir/eflint/epi.eflint b/tests/wir/eflint/epi.eflint new file mode 100644 index 00000000..a82daeea --- /dev/null +++ b/tests/wir/eflint/epi.eflint @@ -0,0 +1,24 @@ ++workflow("workflow-s11A4qQu"). ++node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:1-task"). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:1-task"), asset("epi[1.0.0]")). ++function(node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:1-task"), asset("epi[1.0.0]")), "local_compute"). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:1-task"), asset("st_antonius_ect")). ++node-output(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:1-task"), asset("result_local_compute_725aba")). ++node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:3-task"). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:3-task"), asset("epi[1.0.0]")). ++function(node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:3-task"), asset("epi[1.0.0]")), "local_compute"). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:3-task"), asset("umc_utrecht_ect")). ++node-output(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:3-task"), asset("result_local_compute_33cfc9")). ++node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:5-task"). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:5-task"), asset("epi[1.0.0]")). ++function(node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:5-task"), asset("epi[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:5-task"), asset("result_local_compute_725aba")). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:5-task"), asset("result_local_compute_33cfc9")). ++node-output(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:5-task"), asset("result_aggregate_fad847")). ++node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:7-commit"). ++commit(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:7-commit")). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:7-commit"), asset("__brane_internals")). ++function(node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:7-commit"), asset("__brane_internals")), "commit"). ++node-input(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:7-commit"), asset("result_aggregate_fad847")). ++node-output(node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:7-commit"), asset("surf_res")). ++node(workflow("workflow-s11A4qQu"), "workflow-s11A4qQu-
:8-return"). diff --git a/tests/wir/eflint/epi_one.eflint b/tests/wir/eflint/epi_one.eflint new file mode 100644 index 00000000..f12fbf21 --- /dev/null +++ b/tests/wir/eflint/epi_one.eflint @@ -0,0 +1,18 @@ ++workflow("workflow-t5mblXM9"). ++node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:1-task"). ++node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:1-task"), asset("epi[1.0.0]")). ++function(node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:1-task"), asset("epi[1.0.0]")), "local_compute"). ++node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:1-task"), asset("umc_utrecht_ect")). ++node-output(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:1-task"), asset("result_local_compute_6f5828")). ++node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:3-task"). ++node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:3-task"), asset("epi[1.0.0]")). ++function(node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:3-task"), asset("epi[1.0.0]")), "aggregate"). ++node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:3-task"), asset("result_local_compute_6f5828")). ++node-output(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:3-task"), asset("result_aggregate_596814")). ++node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:5-commit"). ++commit(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:5-commit")). ++node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:5-commit"), asset("__brane_internals")). ++function(node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:5-commit"), asset("__brane_internals")), "commit"). ++node-input(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:5-commit"), asset("result_aggregate_596814")). ++node-output(node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:5-commit"), asset("surf_res")). ++node(workflow("workflow-t5mblXM9"), "workflow-t5mblXM9-
:6-return"). diff --git a/tests/wir/eflint/for.eflint b/tests/wir/eflint/for.eflint new file mode 100644 index 00000000..83795d7b --- /dev/null +++ b/tests/wir/eflint/for.eflint @@ -0,0 +1,5 @@ ++workflow("workflow-GY9R6G7l"). ++node(workflow("workflow-GY9R6G7l"), "workflow-GY9R6G7l-sMbj-loop"). ++loop(node(workflow("workflow-GY9R6G7l"), "workflow-GY9R6G7l-sMbj-loop")). ++node(workflow("workflow-GY9R6G7l"), "workflow-GY9R6G7l-7hCC-loop"). ++loop(node(workflow("workflow-GY9R6G7l"), "workflow-GY9R6G7l-7hCC-loop")). diff --git a/tests/wir/eflint/function.eflint b/tests/wir/eflint/function.eflint new file mode 100644 index 00000000..30ed25f5 --- /dev/null +++ b/tests/wir/eflint/function.eflint @@ -0,0 +1 @@ ++workflow("workflow-FcbMdsmc"). diff --git a/tests/wir/eflint/hello_world.eflint b/tests/wir/eflint/hello_world.eflint new file mode 100644 index 00000000..9ac3f2de --- /dev/null +++ b/tests/wir/eflint/hello_world.eflint @@ -0,0 +1,4 @@ ++workflow("workflow-rTqVdnt5"). ++node(workflow("workflow-rTqVdnt5"), "workflow-rTqVdnt5-
:0-task"). ++node-input(node(workflow("workflow-rTqVdnt5"), "workflow-rTqVdnt5-
:0-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-rTqVdnt5"), "workflow-rTqVdnt5-
:0-task"), asset("hello_world[1.0.0]")), "hello_world"). diff --git a/tests/wir/eflint/if.eflint b/tests/wir/eflint/if.eflint new file mode 100644 index 00000000..690e8900 --- /dev/null +++ b/tests/wir/eflint/if.eflint @@ -0,0 +1 @@ ++workflow("workflow-EkqxZ2Mp"). diff --git a/tests/wir/eflint/if_complex.eflint b/tests/wir/eflint/if_complex.eflint new file mode 100644 index 00000000..3887ed61 --- /dev/null +++ b/tests/wir/eflint/if_complex.eflint @@ -0,0 +1 @@ ++workflow("workflow-Ut3vTgpJ"). diff --git a/tests/wir/eflint/import.eflint b/tests/wir/eflint/import.eflint new file mode 100644 index 00000000..fa3a3836 --- /dev/null +++ b/tests/wir/eflint/import.eflint @@ -0,0 +1,4 @@ ++workflow("workflow-OyxK53zY"). ++node(workflow("workflow-OyxK53zY"), "workflow-OyxK53zY-
:0-task"). ++node-input(node(workflow("workflow-OyxK53zY"), "workflow-OyxK53zY-
:0-task"), asset("test[1.0.0]")). ++function(node-input(node(workflow("workflow-OyxK53zY"), "workflow-OyxK53zY-
:0-task"), asset("test[1.0.0]")), "hello_world"). diff --git a/tests/wir/eflint/math.eflint b/tests/wir/eflint/math.eflint new file mode 100644 index 00000000..19fbb485 --- /dev/null +++ b/tests/wir/eflint/math.eflint @@ -0,0 +1 @@ ++workflow("workflow-sGeqOere"). diff --git a/tests/wir/eflint/metadata.eflint b/tests/wir/eflint/metadata.eflint new file mode 100644 index 00000000..abb0ac88 --- /dev/null +++ b/tests/wir/eflint/metadata.eflint @@ -0,0 +1,4 @@ ++workflow("workflow-OOZguqrv"). ++node(workflow("workflow-OOZguqrv"), "workflow-OOZguqrv-
:0-task"). ++node-input(node(workflow("workflow-OOZguqrv"), "workflow-OOZguqrv-
:0-task"), asset("hello_world[1.0.0]")). ++function(node-input(node(workflow("workflow-OOZguqrv"), "workflow-OOZguqrv-
:0-task"), asset("hello_world[1.0.0]")), "hello_world"). diff --git a/tests/wir/eflint/on.eflint b/tests/wir/eflint/on.eflint new file mode 100644 index 00000000..a95ab18d --- /dev/null +++ b/tests/wir/eflint/on.eflint @@ -0,0 +1,13 @@ ++workflow("workflow-bozR4uTg"). ++node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:0-task"). ++node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:0-task"), asset("test[1.0.0]")). ++function(node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:0-task"), asset("test[1.0.0]")), "hello_world"). ++node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:3-task"). ++node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:3-task"), asset("test[1.0.0]")). ++function(node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:3-task"), asset("test[1.0.0]")), "hello_world"). ++node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:6-task"). ++node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:6-task"), asset("test[1.0.0]")). ++function(node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:6-task"), asset("test[1.0.0]")), "hello_world"). ++node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:9-task"). ++node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:9-task"), asset("test[1.0.0]")). ++function(node-input(node(workflow("workflow-bozR4uTg"), "workflow-bozR4uTg-
:9-task"), asset("test[1.0.0]")), "hello_world"). diff --git a/tests/wir/eflint/parallel.eflint b/tests/wir/eflint/parallel.eflint new file mode 100644 index 00000000..dae0418b --- /dev/null +++ b/tests/wir/eflint/parallel.eflint @@ -0,0 +1,11 @@ ++workflow("workflow-pRA42iRy"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:4-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:8-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:12-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:16-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:22-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:24-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:26-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:28-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:36-return"). ++node(workflow("workflow-pRA42iRy"), "workflow-pRA42iRy-
:40-return"). diff --git a/tests/wir/eflint/scopes.eflint b/tests/wir/eflint/scopes.eflint new file mode 100644 index 00000000..5a3a2c95 --- /dev/null +++ b/tests/wir/eflint/scopes.eflint @@ -0,0 +1 @@ ++workflow("workflow-5UrAS8ep"). diff --git a/tests/wir/eflint/vars.eflint b/tests/wir/eflint/vars.eflint new file mode 100644 index 00000000..a9b57d1e --- /dev/null +++ b/tests/wir/eflint/vars.eflint @@ -0,0 +1 @@ ++workflow("workflow-T2lu5VhH"). diff --git a/tests/wir/eflint/while.eflint b/tests/wir/eflint/while.eflint new file mode 100644 index 00000000..1188e41b --- /dev/null +++ b/tests/wir/eflint/while.eflint @@ -0,0 +1,3 @@ ++workflow("workflow-Wx0xKTBQ"). ++node(workflow("workflow-Wx0xKTBQ"), "workflow-Wx0xKTBQ-Vnde-loop"). ++loop(node(workflow("workflow-Wx0xKTBQ"), "workflow-Wx0xKTBQ-Vnde-loop")). diff --git a/tests/wir/empty.json b/tests/wir/empty.json new file mode 100644 index 00000000..cb47eb43 --- /dev/null +++ b/tests/wir/empty.json @@ -0,0 +1,98 @@ +{ + "id": "workflow-TAx9GXQW", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/epi.json b/tests/wir/epi.json new file mode 100644 index 00000000..e70e1907 --- /dev/null +++ b/tests/wir/epi.json @@ -0,0 +1,306 @@ +{ + "id": "workflow-s11A4qQu", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "epi", + "v": "1.0.0", + "d": { + "n": "aggregate", + "a": [ + { + "kind": "res" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "lhs", + "rhs" + ], + "r": [] + }, + { + "kind": "cmp", + "p": "epi", + "v": "1.0.0", + "d": { + "n": "local_compute", + "a": [ + { + "kind": "data" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "input" + ], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "res_sta", + "t": { + "kind": "res" + } + }, + { + "n": "res_umc", + "t": { + "kind": "res" + } + }, + { + "n": "res", + "t": { + "kind": "res" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + 
{ + "kind": "vrd", + "d": 0 + }, + { + "kind": "str", + "v": "st_antonius_ect" + }, + { + "kind": "ins", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"st_antonius_ect\"}": null + }, + "r": "result_local_compute_725aba", + "m": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "str", + "v": "umc_utrecht_ect" + }, + { + "kind": "ins", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"umc_utrecht_ect\"}": null + }, + "r": "result_local_compute_33cfc9", + "m": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "vrg", + "d": 1 + } + ], + "n": 5 + }, + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "surf" + ] + }, + "s": null, + "i": { + "{\"IntermediateResult\":\"result_local_compute_725aba\"}": null, + "{\"IntermediateResult\":\"result_local_compute_33cfc9\"}": null + }, + "r": "result_aggregate_fad847", + "m": [], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 2 + }, + { + "kind": "str", + "v": "surf_res" + }, + { + "kind": "vrg", + "d": 2 + }, + { + "kind": "fnc", + "d": 3 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [ + { + "IntermediateResult": "result_aggregate_fad847" + } + ], + "r": [ + { + "Data": "surf_res" + } + ], + "n": 8 + }, + { + "kind": "ret", + "r": [ + { + "Data": "surf_res" + } + ] + } + ], + "funcs": {} +} diff --git a/tests/wir/epi_one.json b/tests/wir/epi_one.json new file mode 100644 index 00000000..8be7ffaf --- /dev/null +++ b/tests/wir/epi_one.json @@ -0,0 +1,265 @@ +{ + "id": "workflow-t5mblXM9", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "epi", + "v": "1.0.0", + "d": { + "n": "aggregate", + "a": [ + { + "kind": "res" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "lhs", + "rhs" + ], + "r": [] + }, + { + "kind": "cmp", + "p": "epi", + "v": "1.0.0", + "d": { + "n": "local_compute", + "a": [ + { + "kind": "data" + } + ], + "r": { + "kind": "res" + } + }, + "a": [ + "input" + ], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "res_umc", + "t": { + "kind": "res" + } + }, + { + "n": "res", + "t": { + "kind": "res" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "str", + "v": "umc_utrecht_ect" + }, + { + "kind": "ins", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "nod", + "t": 1, + "l": "all", + "s": null, + "i": { + "{\"Data\":\"umc_utrecht_ect\"}": null + }, + "r": "result_local_compute_6f5828", + "m": [], + "n": 2 + }, + { + "kind": 
"lin", + "i": [ + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "surf" + ] + }, + "s": null, + "i": { + "{\"IntermediateResult\":\"result_local_compute_6f5828\"}": null + }, + "r": "result_aggregate_596814", + "m": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "str", + "v": "surf_res" + }, + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "fnc", + "d": 3 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [ + { + "IntermediateResult": "result_aggregate_596814" + } + ], + "r": [ + { + "Data": "surf_res" + } + ], + "n": 6 + }, + { + "kind": "ret", + "r": [ + { + "Data": "surf_res" + } + ] + } + ], + "funcs": {} +} diff --git a/tests/wir/for.json b/tests/wir/for.json new file mode 100644 index 00000000..d9102b34 --- /dev/null +++ b/tests/wir/for.json @@ -0,0 +1,479 @@ +{ + "id": "workflow-GY9R6G7l", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "i", + "t": { + "kind": "int" + } + }, + { + "n": "i", + "t": { + "kind": "int" + } + }, + { + "n": "a", + "t": { + "kind": "int" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "vrs", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "loop", + "c": 2, + "b": 6, + "n": 17 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 10 + } + ], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 5 + }, + { + "kind": "brc", + "t": 6, + "f": 17, + "m": 17 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 7 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + } + ], + "n": 8 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 0 + } + ], + "n": 9 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": ") It's dancing time!" 
+ } + ], + "n": 11 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 12 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 14 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 15 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 16 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 0 + } + ], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "vrs", + "d": 1 + } + ], + "n": 18 + }, + { + "kind": "loop", + "c": 19, + "b": 23, + "n": 37 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 20 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 10 + } + ], + "n": 21 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 22 + }, + { + "kind": "brc", + "t": 23, + "f": 37, + "m": 37 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 2 + } + ], + "n": 24 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 42 + } + ], + "n": 25 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 2 + } + ], + "n": 26 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 2 + } + ], + "n": 27 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 28 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 29 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + } + ], + "n": 30 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 31 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 32 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 33 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 34 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 35 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 1 + } + ], + "n": 36 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vru", + "d": 2 + } + ], + "n": 19 + }, + { + "kind": "lin", + "i": [], + "n": 38 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/function.json b/tests/wir/function.json new file mode 100644 index 00000000..42b07a28 --- /dev/null +++ b/tests/wir/function.json @@ -0,0 +1,377 @@ +{ + "id": "workflow-FcbMdsmc", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + }, + { + "n": "hello_there", + "a": [], + "r": { + "kind": "void" + } + }, + { + "n": "say", + "a": [ + { + "kind": "any" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "add", + "a": [ + { + "kind": "any" + }, + { + "kind": "any" + } + ], + "r": { + "kind": "any" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "text", + "t": { + "kind": "any" + } + 
}, + { + "n": "lhs", + "t": { + "kind": "any" + } + }, + { + "n": "rhs", + "t": { + "kind": "any" + } + }, + { + "n": "result", + "t": { + "kind": "any" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 4 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "General Kenobi!" + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "fnc", + "d": 5 + } + ], + "n": 3 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 21 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 21 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "fnc", + "d": 6 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "stp" + } + ], + "funcs": { + "6": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "vrs", + "d": 2 + }, + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrd", + "d": 3 + }, + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "vrg", + "d": 2 + }, + { + "kind": "add" + }, + { + "kind": "vrs", + "d": 3 + }, + { + "kind": "vrg", + "d": 3 + }, + { + "kind": "vru", + "d": 3 + } + ], + "n": 1 + }, + { + "kind": "ret", + "r": [] + } + ], + "4": [ + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Hello there!" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "ret", + "r": [] + } + ], + "5": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "ret", + "r": [] + } + ] + } +} diff --git a/tests/wir/hello_world.json b/tests/wir/hello_world.json new file mode 100644 index 00000000..f2e0cec7 --- /dev/null +++ b/tests/wir/hello_world.json @@ -0,0 +1,139 @@ +{ + "id": "workflow-rTqVdnt5", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "hello_world", + "v": "1.0.0", + "d": { + "n": "hello_world", + "a": [], + "r": { + "kind": "str" + } + }, + "a": [], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": 
[], + "n": 1 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 2 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 3 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/if.json b/tests/wir/if.json new file mode 100644 index 00000000..263a14cb --- /dev/null +++ b/tests/wir/if.json @@ -0,0 +1,303 @@ +{ + "id": "workflow-EkqxZ2Mp", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "value", + "t": { + "kind": "int" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "int", + "v": 42 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "int", + "v": 42 + }, + { + "kind": "gt" + } + ], + "n": 1 + }, + { + "kind": "brc", + "t": 2, + "f": 5, + "m": 21 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "More than the magic number!" + } + ], + "n": 3 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 4 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 21 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 42 + } + ], + "n": 7 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 8 + }, + { + "kind": "brc", + "t": 9, + "f": 12, + "m": 12 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Less than the magic number!" + } + ], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 11 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 12 + }, + { + "kind": "lin", + "i": [], + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 14 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 42 + } + ], + "n": 15 + }, + { + "kind": "lin", + "i": [ + { + "kind": "eq" + } + ], + "n": 16 + }, + { + "kind": "brc", + "t": 17, + "f": 20, + "m": 20 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Equal to the magic number!" 
+ } + ], + "n": 18 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 19 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 20 + }, + { + "kind": "lin", + "i": [], + "n": 21 + }, + { + "kind": "lin", + "i": [], + "n": 22 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/if_complex.json b/tests/wir/if_complex.json new file mode 100644 index 00000000..254560c6 --- /dev/null +++ b/tests/wir/if_complex.json @@ -0,0 +1,703 @@ +{ + "id": "workflow-Ut3vTgpJ", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "value", + "t": { + "kind": "int" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "int", + "v": 42 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "gt" + } + ], + "n": 1 + }, + { + "kind": "brc", + "t": 2, + "f": 37, + "m": 69 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 42 + } + ], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "gt" + } + ], + "n": 5 + }, + { + "kind": "brc", + "t": 6, + "f": 25, + "m": 36 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 7 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 84 + } + ], + "n": 8 + }, + { + "kind": "lin", + "i": [ + { + "kind": "gt" + } + ], + "n": 9 + }, + { + "kind": "brc", + "t": 10, + "f": 13, + "m": 24 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is more than 84!" + } + ], + "n": 11 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 12 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 24 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 14 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 84 + } + ], + "n": 15 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 16 + }, + { + "kind": "brc", + "t": 17, + "f": 20, + "m": 23 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is less than 84!" + } + ], + "n": 18 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 19 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 23 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is 84!" 
+ } + ], + "n": 21 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 22 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 23 + }, + { + "kind": "lin", + "i": [], + "n": 24 + }, + { + "kind": "lin", + "i": [], + "n": 36 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 26 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 42 + } + ], + "n": 27 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 28 + }, + { + "kind": "brc", + "t": 29, + "f": 32, + "m": 35 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is less than 42!" + } + ], + "n": 30 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 31 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 35 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is 42!" + } + ], + "n": 33 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 34 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 35 + }, + { + "kind": "lin", + "i": [], + "n": 36 + }, + { + "kind": "lin", + "i": [], + "n": 69 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 38 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 0 + } + ], + "n": 39 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 40 + }, + { + "kind": "brc", + "t": 41, + "f": 65, + "m": 68 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is less than 0!" + } + ], + "n": 42 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 43 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 44 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 45 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 42 + } + ], + "n": 46 + }, + { + "kind": "lin", + "i": [ + { + "kind": "neg" + } + ], + "n": 47 + }, + { + "kind": "lin", + "i": [ + { + "kind": "gt" + } + ], + "n": 48 + }, + { + "kind": "brc", + "t": 49, + "f": 52, + "m": 64 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is more than -42!" + } + ], + "n": 50 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 51 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 64 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 53 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 42 + } + ], + "n": 54 + }, + { + "kind": "lin", + "i": [ + { + "kind": "neg" + } + ], + "n": 55 + }, + { + "kind": "lin", + "i": [ + { + "kind": "lt" + } + ], + "n": 56 + }, + { + "kind": "brc", + "t": 57, + "f": 60, + "m": 63 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is less than -42!" + } + ], + "n": 58 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 59 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 63 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is -42!" + } + ], + "n": 61 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 62 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 63 + }, + { + "kind": "lin", + "i": [], + "n": 64 + }, + { + "kind": "lin", + "i": [], + "n": 68 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Value is 0!" 
+ } + ], + "n": 66 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 67 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 68 + }, + { + "kind": "lin", + "i": [], + "n": 69 + }, + { + "kind": "lin", + "i": [], + "n": 70 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/import.json b/tests/wir/import.json new file mode 100644 index 00000000..7e0bfb4f --- /dev/null +++ b/tests/wir/import.json @@ -0,0 +1,159 @@ +{ + "id": "workflow-OyxK53zY", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "test", + "v": "1.0.0", + "d": { + "n": "hello_world", + "a": [], + "r": { + "kind": "str" + } + }, + "a": [], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "TestClass", + "i": "test", + "v": "1.0.0", + "p": [ + { + "n": "fourty_two", + "t": { + "kind": "int" + } + }, + { + "n": "hello", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [], + "n": 1 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 2 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 3 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/math.json b/tests/wir/math.json new file mode 100644 index 00000000..44ee22c5 --- /dev/null +++ b/tests/wir/math.json @@ -0,0 +1,229 @@ +{ + "id": "workflow-sGeqOere", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "add" + }, + { + "kind": "pop" + }, + { + "kind": "int", + "v": 2 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "sub" + }, + { + "kind": "pop" + }, + { + "kind": "int", + "v": 42 + }, + { + "kind": "int", + "v": 55 + }, + { + "kind": "mul" + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "add" + }, + { + "kind": "pop" + }, + { + "kind": "int", + "v": 33333 + }, + { + "kind": "int", + "v": 
33333 + }, + { + "kind": "mul" + }, + { + "kind": "pop" + }, + { + "kind": "int", + "v": 828 + }, + { + "kind": "int", + "v": 12318 + }, + { + "kind": "int", + "v": 123123 + }, + { + "kind": "mod" + }, + { + "kind": "int", + "v": 1231231231 + }, + { + "kind": "div" + }, + { + "kind": "add" + }, + { + "kind": "pop" + }, + { + "kind": "rel", + "v": 12.0 + }, + { + "kind": "int", + "v": 123123 + }, + { + "kind": "cst", + "t": { + "kind": "real" + } + }, + { + "kind": "add" + }, + { + "kind": "pop" + }, + { + "kind": "rel", + "v": 777777.777777 + }, + { + "kind": "pop" + }, + { + "kind": "rel", + "v": 5550000000000.0 + }, + { + "kind": "pop" + } + ], + "n": 1 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/metadata.json b/tests/wir/metadata.json new file mode 100644 index 00000000..a25db7e6 --- /dev/null +++ b/tests/wir/metadata.json @@ -0,0 +1,143 @@ +{ + "id": "workflow-OOZguqrv", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "hello_world", + "v": "1.0.0", + "d": { + "n": "hello_world", + "a": [], + "r": { + "kind": "str" + } + }, + "a": [], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "test" + ] + }, + "s": null, + "i": {}, + "r": null, + "m": [], + "n": 1 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 2 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 3 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/on.json b/tests/wir/on.json new file mode 100644 index 00000000..25591348 --- /dev/null +++ b/tests/wir/on.json @@ -0,0 +1,306 @@ +{ + "id": "workflow-bozR4uTg", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + }, + { + "n": "fourty_two", + "a": [], + "r": { + "kind": "str" + } + } + ], + "tasks": [ + { + "kind": "cmp", + "p": "test", + "v": "1.0.0", + "d": { + "n": "hello_world", + "a": [], + "r": { + "kind": "str" + } + }, + "a": [], + "r": [] + } + ], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "TestClass", + "i": "test", + "v": "1.0.0", + "p": [ + { + "n": "fourty_two", + "t": { + "kind": "int" + } + }, + { + 
"n": "hello", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "random", + "t": { + "kind": "str" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "nod", + "t": 0, + "l": "all", + "s": null, + "i": {}, + "r": null, + "m": [], + "n": 1 + }, + { + "kind": "lin", + "i": [ + { + "kind": "pop" + }, + { + "kind": "str", + "v": "I'm talking to you from far away!" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 2 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 3 + }, + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "test" + ] + }, + "s": null, + "i": {}, + "r": null, + "m": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "pop" + }, + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "str", + "v": "random" + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "str", + "v": "I have no idea where I am!" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 6 + }, + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "random_but_not_really_anymore" + ] + }, + "s": null, + "i": {}, + "r": null, + "m": [], + "n": 7 + }, + { + "kind": "lin", + "i": [ + { + "kind": "pop" + }, + { + "kind": "str", + "v": "I have found the answer" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 8 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 9 + }, + { + "kind": "nod", + "t": 0, + "l": { + "restricted": [ + "42" + ] + }, + "s": null, + "i": {}, + "r": null, + "m": [], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "pop" + } + ], + "n": 11 + }, + { + "kind": "stp" + } + ], + "funcs": { + "4": [ + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "42" + } + ], + "n": 1 + }, + { + "kind": "ret", + "r": [] + } + ] + } +} diff --git a/tests/wir/parallel.json b/tests/wir/parallel.json new file mode 100644 index 00000000..ac47caa6 --- /dev/null +++ b/tests/wir/parallel.json @@ -0,0 +1,456 @@ +{ + "id": "workflow-pRA42iRy", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "test", + "t": { + "kind": "arr", + "t": { + "kind": "str" + } + } + }, + { + "n": "sum", + "t": { + "kind": "int" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "par", + "b": [ + 1, + 5, + 9 + ], + "m": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Running in parallel..." + } + ], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 3 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 4 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "...so we can..." 
+ } + ], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "...finish each other's sentences!" + } + ], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 11 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 12 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "join", + "m": "None", + "n": 14 + }, + { + "kind": "par", + "b": [ + 15 + ], + "m": 17 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "No parallelism at all, gnagnagnagna" + } + ], + "n": 16 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "join", + "m": "All", + "n": 18 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 19 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 20 + }, + { + "kind": "par", + "b": [ + 21, + 23, + 25, + 27 + ], + "m": 29 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 22 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 2 + } + ], + "n": 24 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 3 + } + ], + "n": 26 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 4 + } + ], + "n": 28 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "join", + "m": "Sum", + "n": 30 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 31 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 32 + }, + { + "kind": "par", + "b": [ + 33, + 37 + ], + "m": 41 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Running somewhere randomly!" + } + ], + "n": 34 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 35 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 36 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "Running somewhere specific but in parallel!" 
+ } + ], + "n": 38 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 1 + } + ], + "n": 39 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 40 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "join", + "m": "None", + "n": 42 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/recursion.json b/tests/wir/recursion.json new file mode 100644 index 00000000..dacf345a --- /dev/null +++ b/tests/wir/recursion.json @@ -0,0 +1,655 @@ +{ + "id": "workflow-yRUal9ku", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + }, + { + "n": "fibonacci", + "a": [ + { + "kind": "any" + }, + { + "kind": "any" + }, + { + "kind": "any" + } + ], + "r": { + "kind": "int" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "n", + "t": { + "kind": "any" + } + }, + { + "n": "n_1", + "t": { + "kind": "any" + } + }, + { + "n": "i", + "t": { + "kind": "any" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 0 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "fnc", + "d": 4 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 3 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 4 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 0 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 5 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "fnc", + "d": 4 + } + ], + "n": 5 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 0 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 10 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "fnc", + "d": 4 + } + ], + "n": 9 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 10 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 11 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 12 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 0 + }, + { + "kind": 
"cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "int", + "v": 20 + }, + { + "kind": "cst", + "t": { + "kind": "any" + } + }, + { + "kind": "fnc", + "d": 4 + } + ], + "n": 13 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 14 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 15 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 16 + }, + { + "kind": "stp" + } + ], + "funcs": { + "4": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "vrs", + "d": 2 + }, + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrg", + "d": 2 + }, + { + "kind": "int", + "v": 0 + }, + { + "kind": "eq" + } + ], + "n": 1 + }, + { + "kind": "brc", + "t": 2, + "f": 4, + "m": null + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 0 + } + ], + "n": 3 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 2 + } + ], + "n": 5 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "eq" + } + ], + "n": 7 + }, + { + "kind": "brc", + "t": 8, + "f": 11, + "m": null + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 9 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "int" + } + } + ], + "n": 10 + }, + { + "kind": "ret", + "r": [] + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 12 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 13 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 1 + } + ], + "n": 14 + }, + { + "kind": "lin", + "i": [ + { + "kind": "add" + } + ], + "n": 15 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 2 + } + ], + "n": 16 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "int" + } + } + ], + "n": 17 + }, + { + "kind": "lin", + "i": [ + { + "kind": "int", + "v": 1 + } + ], + "n": 18 + }, + { + "kind": "lin", + "i": [ + { + "kind": "sub" + } + ], + "n": 19 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "any" + } + } + ], + "n": 20 + }, + { + "kind": "lin", + "i": [ + { + "kind": "fnc", + "d": 4 + } + ], + "n": 21 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 22 + }, + { + "kind": "lin", + "i": [ + { + "kind": "cst", + "t": { + "kind": "int" + } + } + ], + "n": 23 + }, + { + "kind": "ret", + "r": [] + } + ] + } +} diff --git a/tests/wir/scopes.json b/tests/wir/scopes.json new file mode 100644 index 00000000..7d08d99b --- /dev/null +++ b/tests/wir/scopes.json @@ -0,0 +1,219 @@ +{ + "id": "workflow-5UrAS8ep", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + }, + { + "n": "test", + "a": [ + { + "kind": "any" + } + ], + "r": { + "kind": "void" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", 
+ "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "text", + "t": { + "kind": "any" + } + }, + { + "n": "i", + "t": { + "kind": "int" + } + }, + { + "n": "j", + "t": { + "kind": "int" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "vrg", + "d": 1 + }, + { + "kind": "vrs", + "d": 2 + } + ], + "n": 1 + }, + { + "kind": "stp" + } + ], + "funcs": { + "4": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "str", + "v": "Test123" + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "fnc", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 4 + }, + { + "kind": "ret", + "r": [] + } + ] + } +} diff --git a/tests/wir/vars.json b/tests/wir/vars.json new file mode 100644 index 00000000..5802b731 --- /dev/null +++ b/tests/wir/vars.json @@ -0,0 +1,189 @@ +{ + "id": "workflow-T2lu5VhH", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "test", + "t": { + "kind": "int" + } + }, + { + "n": "abc__2", + "t": { + "kind": "str" + } + }, + { + "n": "new_test", + "t": { + "kind": "str" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "int", + "v": 1 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "int", + "v": 2 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "int", + "v": 3 + }, + { + "kind": "vrs", + "d": 0 + }, + { + "kind": "vrd", + "d": 1 + }, + { + "kind": "str", + "v": "Hello there!" 
+ }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "bol", + "v": true + }, + { + "kind": "cst", + "t": { + "kind": "str" + } + }, + { + "kind": "vrs", + "d": 1 + }, + { + "kind": "vrd", + "d": 2 + }, + { + "kind": "str", + "v": "Keyword bug: fixed" + }, + { + "kind": "vrs", + "d": 2 + } + ], + "n": 1 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} diff --git a/tests/wir/while.json b/tests/wir/while.json new file mode 100644 index 00000000..21517dca --- /dev/null +++ b/tests/wir/while.json @@ -0,0 +1,185 @@ +{ + "id": "workflow-Wx0xKTBQ", + "table": { + "funcs": [ + { + "n": "print", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "println", + "a": [ + { + "kind": "str" + } + ], + "r": { + "kind": "void" + } + }, + { + "n": "len", + "a": [ + { + "kind": "arr", + "t": { + "kind": "any" + } + } + ], + "r": { + "kind": "int" + } + }, + { + "n": "commit_result", + "a": [ + { + "kind": "str" + }, + { + "kind": "res" + } + ], + "r": { + "kind": "data" + } + } + ], + "tasks": [], + "classes": [ + { + "n": "Data", + "i": null, + "v": null, + "p": [ + { + "n": "name", + "t": { + "kind": "str" + } + } + ], + "m": [] + }, + { + "n": "IntermediateResult", + "i": null, + "v": null, + "p": [ + { + "n": "path", + "t": { + "kind": "str" + } + } + ], + "m": [] + } + ], + "vars": [ + { + "n": "condition", + "t": { + "kind": "bool" + } + } + ], + "results": {} + }, + "metadata": [], + "user": null, + "graph": [ + { + "kind": "lin", + "i": [ + { + "kind": "vrd", + "d": 0 + }, + { + "kind": "bol", + "v": true + }, + { + "kind": "vrs", + "d": 0 + } + ], + "n": 1 + }, + { + "kind": "loop", + "c": 2, + "b": 4, + "n": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrg", + "d": 0 + } + ], + "n": 3 + }, + { + "kind": "brc", + "t": 4, + "f": 6, + "m": 6 + }, + { + "kind": "lin", + "i": [ + { + "kind": "bol", + "v": false + } + ], + "n": 5 + }, + { + "kind": "lin", + "i": [ + { + "kind": "vrs", + "d": 0 + } + ], + "n": 2 + }, + { + "kind": "lin", + "i": [ + { + "kind": "str", + "v": "That was one iteration :)" + }, + { + "kind": "fnc", + "d": 1 + } + ], + "n": 7 + }, + { + "kind": "cll", + "i": [], + "r": [], + "n": 8 + }, + { + "kind": "stp" + } + ], + "funcs": {} +} From 8bfc9f4ca5e901fb1aa5c375e720a6c55570af40 Mon Sep 17 00:00:00 2001 From: Lut99 Date: Tue, 27 May 2025 16:27:25 +0200 Subject: [PATCH 02/39] Added an actually working Dockerfile.let now (at least on Linux) --- Dockerfile.let | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/Dockerfile.let b/Dockerfile.let index 3e00cddd..0809d13f 100644 --- a/Dockerfile.let +++ b/Dockerfile.let @@ -2,10 +2,10 @@ # by Tim Müller # # Contains the Dockerfile for building a `branelet` binary in a container. -# +# # This is necessary when running a non-released Brane (i.e., no precompiled binaries) and on a # system that has a newer GLIBC than would run in the container. -# +# # The easiest way to use it is through `make brane-let-docker` # @@ -19,8 +19,8 @@ ARG USERID=1000 ARG GROUPID=1000 # Setup a user mirroring the main one -RUN if [[ ! 
"$(getent group $GROUPID)" ]]; then addgroup --gid $GROUPID brane && export GROUP=brane else export GROUP=$(getent group $GROUPID | cut -d: -f1); fi -RUN adduser --uid $USERID --gid $GROUPID --gecos "Brane" --disabled-password brane +RUN if [ -z "$(getent group "$GROUPID")" ]; then groupadd -g "$GROUPID" brane; fi +RUN useradd -u "$USERID" -g "$GROUPID" -m brane # Install build dependencies RUN apt-get update && DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install -y \ @@ -29,27 +29,28 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install perl curl \ && rm -rf /var/lib/apt/lists/* -# Prepare the build directory while we're root -RUN mkdir -p /build/target \ - && chown -R brane:$GROUP /build - # Install rust USER brane RUN bash -c "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y" \ && echo ". /home/brane/.cargo/env" >> /home/brane/.profile # Copy over relevant crates & other files -COPY --chown=brane:$GROUP . /build +USER root +COPY . /build +RUN mkdir -p /build/target \ + && chown -R brane:$(getent group "$GROUPID" | cut -d: -f1) /build # Build the binary WORKDIR /build +USER brane RUN --mount=type=cache,id=cargoidx,uid=$USERID,target=/home/brane/.cargo/registry \ --mount=type=cache,id=braneletcache,uid=$USERID,target=/build/target \ . /home/brane/.profile \ && cargo build \ - --release \ - --package brane-let \ + --release \ + --package brane-let \ && cp ./target/release/branelet /home/brane/branelet # Done ENTRYPOINT ["cp", "/home/brane/branelet", "/output/branelet"] + From fee4e8ce5cec0bb70ad8d166e1d6ac0b250329c4 Mon Sep 17 00:00:00 2001 From: Lut99 Date: Tue, 27 May 2025 16:40:31 +0200 Subject: [PATCH 03/39] Fixed dangling links in docs --- brane-ast/src/dsl.rs | 2 +- brane-chk/src/apis/reasoner.rs | 4 ++-- brane-chk/src/reasonerconn.rs | 2 +- brane-chk/src/workflow/compile.rs | 2 +- brane-chk/src/workflow/eflint.rs | 5 ++--- brane-chk/src/workflow/preprocess.rs | 2 +- brane-cli/src/run.rs | 2 +- brane-tsk/src/errors.rs | 2 +- specifications/src/checking.rs | 6 +++--- specifications/src/working.rs | 8 ++++---- 10 files changed, 17 insertions(+), 18 deletions(-) diff --git a/brane-ast/src/dsl.rs b/brane-ast/src/dsl.rs index 6efd1c57..9d85df72 100644 --- a/brane-ast/src/dsl.rs +++ b/brane-ast/src/dsl.rs @@ -60,7 +60,7 @@ pub fn dtype_dsl_to_ast(value: brane_dsl::DataType) -> DataType { /// Converts from an [executable Datatype](DataType) to the DSL one. /// /// # Arguments -/// - `dtype`: The [`DataType`](DataType) to convert. +/// - `dtype`: The [`DataType`] to convert. /// /// # Returns /// A converted [`brane_dsl::DataType`]. diff --git a/brane-chk/src/apis/reasoner.rs b/brane-chk/src/apis/reasoner.rs index 47490522..d9d00341 100644 --- a/brane-chk/src/apis/reasoner.rs +++ b/brane-chk/src/apis/reasoner.rs @@ -32,7 +32,7 @@ use tracing::{Instrument as _, Level, debug, error, span}; /***** ERRORS *****/ -/// Defines the errors originating in the [`Reasoner`] API. +/// Defines the errors originating in the reasoner API. #[derive(Debug, Error)] pub enum Error { #[error("Failed to create the KID resolver")] @@ -56,7 +56,7 @@ pub enum Error { /// Handler for `GET /v2/context` (i.e., retrieving reasoner context). /// /// Out: -/// - 200 OK with a [`ContextResponse`] detailling the relevant reasoner information; or +/// - 200 OK with a [`GetContextResponse`] detailling the relevant reasoner information; or /// - 500 INTERNAL SERVER ERROR with a message what went wrong. 
pub fn get_context(State(this): State>, Extension(auth): Extension) -> impl Send + Future where diff --git a/brane-chk/src/reasonerconn.rs b/brane-chk/src/reasonerconn.rs index 44feb4a2..7f6227b8 100644 --- a/brane-chk/src/reasonerconn.rs +++ b/brane-chk/src/reasonerconn.rs @@ -9,7 +9,7 @@ // Yes // // Description: -//! Defines a wrapper around an [`EFlintJsonReasonerConnector`] that +//! Defines a wrapper around an [`EFlintHaskellReasonerConnector`] that //! includes a particular policy interface. // diff --git a/brane-chk/src/workflow/compile.rs b/brane-chk/src/workflow/compile.rs index d5536a3d..3adc3bb4 100644 --- a/brane-chk/src/workflow/compile.rs +++ b/brane-chk/src/workflow/compile.rs @@ -450,7 +450,7 @@ pub fn pc_to_id(wir: &ast::Workflow, pc: ProgramCounter) -> String { format!("{} -/// Compiles from a Brane [WIR](brane_ast::Workflow) to a policy reasoner [Workflow]. +/// Compiles from a Brane [WIR](ast::Workflow) to a policy reasoner [Workflow]. /// /// # Arguments /// - `wf`: The WIR to compile. diff --git a/brane-chk/src/workflow/eflint.rs b/brane-chk/src/workflow/eflint.rs index db53ed14..6e55ad08 100644 --- a/brane-chk/src/workflow/eflint.rs +++ b/brane-chk/src/workflow/eflint.rs @@ -9,8 +9,7 @@ // Yes // // Description: -//! Implements a compiler from a [`Workflow`] to a series of -//! [`efint_json`] [`Phrase`]s. +//! Implements a compiler from a [`Workflow`] to a series of eFLINT phrases. // use std::collections::{HashMap, HashSet}; @@ -473,7 +472,7 @@ impl<'w> Visitor<'w> for EFlintCompiler<'w> { /***** LIBRARY FUNCTIONS *****/ -/// Compiles a [`Workflow`] to a series of [`efint_json`] [`Phrase`]s. +/// Compiles a [`Workflow`] to a series of eFLINT phrases. /// /// # Arguments /// - `wf`: The [`Workflow`] to compile. diff --git a/brane-chk/src/workflow/preprocess.rs b/brane-chk/src/workflow/preprocess.rs index 1ee35dc5..551a3c3b 100644 --- a/brane-chk/src/workflow/preprocess.rs +++ b/brane-chk/src/workflow/preprocess.rs @@ -220,7 +220,7 @@ pub enum Error { UnknownTask { id: usize }, /// Unknown function given. UnknownFunc { id: FunctionId }, - /// A [`Call`](ast::Edge::Call)-edge was encountered while we didn't know of a function ID on the stack. + /// A [`Call`](Edge::Call)-edge was encountered while we didn't know of a function ID on the stack. CallingWithoutId { pc: ResolvedProgramCounter }, } impl Display for Error { diff --git a/brane-cli/src/run.rs b/brane-cli/src/run.rs index 6de54fac..ed2a9005 100644 --- a/brane-cli/src/run.rs +++ b/brane-cli/src/run.rs @@ -128,7 +128,7 @@ pub async fn initialize_instance( /// - `profile`: If given, prints the profile timings to stdout if reported by the remote. /// /// # Returns -/// A [`FullValue`] carrying the result of the snippet (or [`FullValue::Void`]), and a [`ProgramCounter`] in case a [`FullValue::Data`] is returned telling us which edge downloaded it. +/// A [`FullValue`] carrying the result of the snippet (or [`FullValue::Void`]). /// /// # Errors /// This function may error if anything in the whole shebang crashed. This can be things client-side, but also remote-side. diff --git a/brane-tsk/src/errors.rs b/brane-tsk/src/errors.rs index 879d883c..aa772dcf 100644 --- a/brane-tsk/src/errors.rs +++ b/brane-tsk/src/errors.rs @@ -412,7 +412,7 @@ pub enum AuthorizeError { /// The user to authorize does not execute the given task. 
#[error("Authorized user '{}' does not match '{}' user in workflow\n\nWorkflow:\n{:#?}\n", authenticated, who, workflow)] AuthorizationUserMismatch { who: String, authenticated: String, workflow: Workflow }, - /// An edge was referenced to be executed which wasn't an [`Edge::Node`](brane_ast::ast::Edge). + /// An edge was referenced to be executed which wasn't an [`Edge::Node`](specifications::wir::Edge). #[error("Edge {pc} in workflow is not an Edge::Node but an Edge::{got}")] AuthorizationWrongEdge { pc: ProgramCounter, got: String }, /// An edge index given was out-of-bounds for the given function. diff --git a/specifications/src/checking.rs b/specifications/src/checking.rs index 8f460e1e..8b335a05 100644 --- a/specifications/src/checking.rs +++ b/specifications/src/checking.rs @@ -121,7 +121,7 @@ pub mod deliberation { /***** API BODIES *****/ - /// Defines the request to send to the [`Server::check_workflow()`] endpoint. + /// Defines the request to send to the workflow validation endpoint. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct CheckWorkflowRequest { /// The usecase that refers to the API to consult for state. @@ -220,7 +220,7 @@ pub mod deliberation { - /// Defines the request to send to the [`Server::check_task()`] endpoint. + /// Defines the request to send to the task validation endpoint. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct CheckTaskRequest { /// The usecase that refers to the API to consult for state. @@ -338,7 +338,7 @@ pub mod deliberation { - /// Defines the request to send to the [`Server::check_transfer()`] endpoint. + /// Defines the request to send to the transfer validation endpoint. #[derive(Clone, Debug, Deserialize, Serialize)] pub struct CheckTransferRequest { /// The usecase that refers to the API to consult for state. diff --git a/specifications/src/working.rs b/specifications/src/working.rs index 75232559..eaaff4d1 100644 --- a/specifications/src/working.rs +++ b/specifications/src/working.rs @@ -385,7 +385,7 @@ impl JobServiceClient { /// - `request`: The [`CheckWorkflowRequest`] to send to the endpoint. /// /// # Returns - /// The [`CheckReply`] the endpoint returns. + /// The [`CheckResponse`] the endpoint returns. /// /// # Errors /// This function errors if either we failed to send the request or the endpoint itself failed to process it. @@ -410,7 +410,7 @@ impl JobServiceClient { /// - `request`: The [`CheckTaskRequest`] to send to the endpoint. /// /// # Returns - /// The [`CheckReply`] the endpoint returns. + /// The [`CheckResponse`] the endpoint returns. /// /// # Errors /// This function errors if either we failed to send the request or the endpoint itself failed to process it. @@ -514,7 +514,7 @@ pub trait JobService: 'static + Send + Sync { /// - `request`: The ([`tonic::Request`]-wrapped) [`CheckWorkflowRequest`] containing the relevant details. /// /// # Returns - /// A [`CheckReply`] for this request, wrapped in a [`tonic::Response`]. + /// A [`CheckResponse`] for this request, wrapped in a [`tonic::Response`]. /// /// # Errors /// This function may error (i.e., send back a `tonic::Status`) whenever it fails. @@ -529,7 +529,7 @@ pub trait JobService: 'static + Send + Sync { /// - `request`: The ([`tonic::Request`]-wrapped) [`CheckTaskRequest`] containing the relevant details. /// /// # Returns - /// A [`CheckReply`] for this request, wrapped in a [`tonic::Response`]. + /// A [`CheckResponse`] for this request, wrapped in a [`tonic::Response`]. 
/// /// # Errors /// This function may error (i.e., send back a `tonic::Status`) whenever it fails. From a3db81c4839fed8fea4895ba83339a7cc57fb90d Mon Sep 17 00:00:00 2001 From: Lut99 Date: Tue, 27 May 2025 17:02:57 +0200 Subject: [PATCH 04/39] Clippy fixes --- brane-ast/src/dsl.rs | 4 +- brane-ast/src/state.rs | 8 +-- brane-chk/Cargo.toml | 26 ++++++++-- brane-chk/src/apis/deliberation.rs | 2 +- brane-chk/src/reasonerconn.rs | 6 +-- brane-chk/src/stateresolver.rs | 4 +- brane-chk/src/workflow/compile.rs | 6 +-- brane-chk/src/workflow/compiler.rs | 2 +- brane-chk/src/workflow/eflint.rs | 8 +-- brane-chk/src/workflow/preprocess.rs | 4 ++ brane-cli/src/instance.rs | 4 +- brane-ctl/src/generate.rs | 30 ++++++------ brane-job/src/worker.rs | 6 +++ brane-tsk/src/docker.rs | 14 +++--- brane-tsk/src/input.rs | 2 +- specifications/src/address.rs | 73 +++------------------------- specifications/src/checking.rs | 70 ++++++++++++++++++-------- specifications/src/wir/mod.rs | 4 +- 18 files changed, 132 insertions(+), 141 deletions(-) diff --git a/brane-ast/src/dsl.rs b/brane-ast/src/dsl.rs index 9d85df72..34d575aa 100644 --- a/brane-ast/src/dsl.rs +++ b/brane-ast/src/dsl.rs @@ -41,9 +41,7 @@ pub fn dtype_dsl_to_ast(value: brane_dsl::DataType) -> DataType { Semver => DataType::Semver, Array(a) => DataType::Array { elem_type: Box::new(dtype_dsl_to_ast(*a)) }, - Function(sig) => { - DataType::Function { args: sig.args.into_iter().map(|d| dtype_dsl_to_ast(d)).collect(), ret: Box::new(dtype_dsl_to_ast(sig.ret)) } - }, + Function(sig) => DataType::Function { args: sig.args.into_iter().map(dtype_dsl_to_ast).collect(), ret: Box::new(dtype_dsl_to_ast(sig.ret)) }, Class(name) => { // Match if 'Data' or 'IntermediateResult' if name == BuiltinClasses::Data.name() { diff --git a/brane-ast/src/state.rs b/brane-ast/src/state.rs index b3066715..ae396d8c 100644 --- a/brane-ast/src/state.rs +++ b/brane-ast/src/state.rs @@ -357,7 +357,7 @@ impl From for FunctionDef { fn from(value: FunctionState) -> Self { FunctionDef { name: value.name, - args: value.signature.args.into_iter().map(|d| dtype_dsl_to_ast(d)).collect(), + args: value.signature.args.into_iter().map(dtype_dsl_to_ast).collect(), ret: dtype_dsl_to_ast(value.signature.ret), } } @@ -416,7 +416,7 @@ impl From for TaskDef { function: Box::new(FunctionDef { name: value.name, - args: value.signature.args.into_iter().map(|d| dtype_dsl_to_ast(d)).collect(), + args: value.signature.args.into_iter().map(dtype_dsl_to_ast).collect(), ret: dtype_dsl_to_ast(value.signature.ret), }), args_names: value.arg_names, @@ -459,7 +459,7 @@ impl ClassState { // Collect the properties let props: Vec = builtin .props() - .into_iter() + .iter() .map(|(name, dtype)| VarState { name: (*name).into(), data_type: dtype_ast_to_dsl(dtype.clone()), @@ -472,7 +472,7 @@ impl ClassState { // Collect the methods let methods: Vec = builtin .methods() - .into_iter() + .iter() .enumerate() .map(|(i, (name, sig))| { funcs.push(FunctionState { diff --git a/brane-chk/Cargo.toml b/brane-chk/Cargo.toml index 47ec5be8..16676a26 100644 --- a/brane-chk/Cargo.toml +++ b/brane-chk/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "brane-chk" edition = "2021" -rust-version = "1.74" +rust-version = "1.80" version.workspace = true repository.workspace = true authors = ["Tim Müller", "Bas Kloosterman", "Daniel Voogsgerd"] @@ -38,9 +38,22 @@ tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } # eflint-json = { git = "https://gitlab.com/eflint/json-spec-rs", branch = "incorrect-is-invariant", features = 
["display_eflint"] } enum-debug = { git = "https://github.com/Lut99/enum-debug", tag = "v1.1.0" } -error-trace = { git = "https://github.com/Lut99/error-trace-rs", tag = "v3.3.0", features = ["serde"] } -policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor", default-features = false, features = ["eflint-haskell-reasoner", "file-logger", "serde", "workflow"] } -policy-store = { git = "https://github.com/BraneFramework/policy-store", default-features = false, features = ["axum-server", "jwk-auth", "jwk-auth-kid", "sqlite-database", "sqlite-database-embedded-migrations"] } +error-trace = { git = "https://github.com/Lut99/error-trace-rs", tag = "v3.3.0", features = [ + "serde", +] } +policy-reasoner = { git = "https://github.com/BraneFramework/policy-reasoner", branch = "lib-refactor", default-features = false, features = [ + "eflint-haskell-reasoner", + "file-logger", + "serde", + "workflow", +] } +policy-store = { git = "https://github.com/BraneFramework/policy-store", default-features = false, features = [ + "axum-server", + "jwk-auth", + "jwk-auth-kid", + "sqlite-database", + "sqlite-database-embedded-migrations", +] } brane-ast = { path = "../brane-ast" } brane-cfg = { path = "../brane-cfg" } @@ -50,7 +63,10 @@ specifications = { path = "../specifications" } [dev-dependencies] humanlog = { git = "https://github.com/Lut99/humanlog-rs", tag = "v0.2.0" } -names = { git = "https://github.com/Lut99/names-rs", features = ["rand", "three"] } +names = { git = "https://github.com/Lut99/names-rs", features = [ + "rand", + "three", +] } brane-shr = { path = "../brane-shr" } diff --git a/brane-chk/src/apis/deliberation.rs b/brane-chk/src/apis/deliberation.rs index ab61c01c..dc29cd21 100644 --- a/brane-chk/src/apis/deliberation.rs +++ b/brane-chk/src/apis/deliberation.rs @@ -57,7 +57,7 @@ use crate::workflow::compile::pc_to_id; /***** CONSTANTS *****/ /// The initiator claim that must be given in the input header token. -pub const INITIATOR_CLAIM: &'static str = "username"; +pub const INITIATOR_CLAIM: &str = "username"; diff --git a/brane-chk/src/reasonerconn.rs b/brane-chk/src/reasonerconn.rs index 7f6227b8..18faf952 100644 --- a/brane-chk/src/reasonerconn.rs +++ b/brane-chk/src/reasonerconn.rs @@ -56,13 +56,13 @@ impl EFlintHaskellReasonerConnectorWithInterface { /// its manifest directory). Building the reasoner will trigger the first load, if any, /// and this may panic if the input is somehow ill-formed. #[inline] - pub fn new_async<'l, L: AuditLogger>( + pub async fn new_async<'l, L: AuditLogger>( cmd: impl 'l + IntoIterator, base_policy_path: impl 'l + Into, handler: PrefixedHandler<'static>, logger: &'l L, - ) -> impl 'l + Future> { - async move { Ok(Self { reasoner: EFlintHaskellReasonerConnector::new_async(cmd, base_policy_path, handler, logger).await? }) } + ) -> Result { + Ok(Self { reasoner: EFlintHaskellReasonerConnector::new_async(cmd, base_policy_path, handler, logger).await? }) } } impl ReasonerConnector for EFlintHaskellReasonerConnectorWithInterface { diff --git a/brane-chk/src/stateresolver.rs b/brane-chk/src/stateresolver.rs index 8185f375..aa00d87d 100644 --- a/brane-chk/src/stateresolver.rs +++ b/brane-chk/src/stateresolver.rs @@ -46,7 +46,7 @@ use crate::workflow::compile; static DATABASE_USER: LazyLock = LazyLock::new(|| User { id: "brane".into(), name: "Brane".into() }); /// The special policy that is used when the database doesn't mention any active. 
-static DENY_ALL_POLICY: &'static str = "Invariant contradiction When False."; +static DENY_ALL_POLICY: &str = "Invariant contradiction When False."; @@ -350,7 +350,7 @@ async fn assert_workflow_context(_wf: &Workflow, usecase: &str, usecases: &HashM async fn get_active_policy(base_policy_hash: &str, db: &SQLiteDatabase, res: &mut String) -> Result<(), Error> { // Time to fetch a connection debug!("Connecting to backend database..."); - let mut conn: SQLiteConnection = match db.connect(&*DATABASE_USER).await { + let mut conn: SQLiteConnection = match db.connect(&DATABASE_USER).await { Ok(conn) => conn, Err(err) => return Err(Error::DatabaseConnect { err }), }; diff --git a/brane-chk/src/workflow/compile.rs b/brane-chk/src/workflow/compile.rs index 3adc3bb4..0bbd2fc2 100644 --- a/brane-chk/src/workflow/compile.rs +++ b/brane-chk/src/workflow/compile.rs @@ -30,10 +30,10 @@ use super::{preprocess, utils}; /***** CONSTANTS *****/ /// The name of the special commit call. -pub const COMMIT_CALL_NAME: &'static str = "__brane_internals::commit"; +pub const COMMIT_CALL_NAME: &str = "__brane_internals::commit"; /// The name of the special identity function call. -pub const TOPLEVEL_RETURN_CALL_NAME: &'static str = "__brane_internals::toplevel_return"; +pub const TOPLEVEL_RETURN_CALL_NAME: &str = "__brane_internals::toplevel_return"; @@ -256,7 +256,7 @@ fn reconstruct_graph( // Return the elem Ok(Elem::Call(ElemCall { - id: pc_to_id(&wir, pc), + id: pc_to_id(wir, pc), task: format!("{}[{}]::{}", def.package, def.version, def.function.name), input: input .iter() diff --git a/brane-chk/src/workflow/compiler.rs b/brane-chk/src/workflow/compiler.rs index a5551bf2..961a4f95 100644 --- a/brane-chk/src/workflow/compiler.rs +++ b/brane-chk/src/workflow/compiler.rs @@ -265,7 +265,7 @@ fn workflow_to_output(path: &str, lang: OutputLanguage, workflow: Workflow) { // OK, now write to out or stdout if path == "-" { debug!("Writing result to stdout..."); - if let Err(err) = std::io::stdout().write_all(&output.as_bytes()) { + if let Err(err) = std::io::stdout().write_all(output.as_bytes()) { error!("{}", trace!(("Failed to write to stdout"), err)); std::process::exit(1); } diff --git a/brane-chk/src/workflow/eflint.rs b/brane-chk/src/workflow/eflint.rs index 6e55ad08..47359f83 100644 --- a/brane-chk/src/workflow/eflint.rs +++ b/brane-chk/src/workflow/eflint.rs @@ -100,9 +100,9 @@ fn compile_metadata(metadata: &Metadata, phrases: &mut Vec) { /***** FORMATTERS *****/ /// Serializes a workflow to eFLINT using it's [`Display`]-implementation. 
pub struct WorkflowToEflint<'w>(pub &'w Workflow); -impl<'w> Display for WorkflowToEflint<'w> { +impl Display for WorkflowToEflint<'_> { #[inline] - fn fmt(&self, f: &mut Formatter<'_>) -> FResult { eflint_fmt(&self.0, f) } + fn fmt(&self, f: &mut Formatter<'_>) -> FResult { eflint_fmt(self.0, f) } } @@ -231,7 +231,7 @@ impl<'w> Visitor<'w> for DataAnalyzer<'w> { self.first.push((id.clone(), analyzer.first.into_iter().flat_map(|(_, data)| data).collect())); } self.last.clear(); - self.last.extend(analyzer.last.into_iter()); + self.last.extend(analyzer.last); // Continue with iteration Ok(Some(&elem.next)) @@ -413,7 +413,7 @@ impl<'w> Visitor<'w> for EFlintCompiler<'w> { self.phrases.push(create!(constr_app!("loop", node.clone()))); // Collect the inputs & outputs of the body - let mut analyzer = DataAnalyzer::new(&self.names); + let mut analyzer = DataAnalyzer::new(self.names); analyzer.visit(&elem.body)?; // Post-process the input into a list of body nodes and a list of data input diff --git a/brane-chk/src/workflow/preprocess.rs b/brane-chk/src/workflow/preprocess.rs index 551a3c3b..40762834 100644 --- a/brane-chk/src/workflow/preprocess.rs +++ b/brane-chk/src/workflow/preprocess.rs @@ -56,6 +56,10 @@ mod tests { } // Defines a few test files with expected inlinable functions + // NOTE: Sorry Clippy, it may be a complex type but at least you can see what's expected + // from every line. Not gonna go through the hassle of splitting this up for just a quick + // inline one. + #[allow(clippy::type_complexity)] let tests: [(&str, &str, HashMap>>); 5] = [ ("case1", r#"println("Hello, world!");"#, HashMap::from([(1, None)])), ( diff --git a/brane-cli/src/instance.rs b/brane-cli/src/instance.rs index a2d4abc9..194491b6 100644 --- a/brane-cli/src/instance.rs +++ b/brane-cli/src/instance.rs @@ -645,8 +645,8 @@ pub fn edit( if let Some(hostname) = hostname { // We replace the addresses. 
Any new ports will be handled in subsequent `if let`s
         println!("Updating hostname to {}...", style(&hostname.hostname).cyan().bold());
-        info.api = Address::hostname(format!("{}", hostname.hostname), info.api.port);
-        info.drv = Address::hostname(format!("{}", hostname.hostname), info.drv.port);
+        info.api = Address::hostname(hostname.hostname.to_string(), info.api.port);
+        info.drv = Address::hostname(hostname.hostname.to_string(), info.drv.port);
     }
     if let Some(port) = api_port {
         println!("Updating API service port to {}...", style(port).cyan().bold());
diff --git a/brane-ctl/src/generate.rs b/brane-ctl/src/generate.rs
index 7da647ea..48af2774 100644
--- a/brane-ctl/src/generate.rs
+++ b/brane-ctl/src/generate.rs
@@ -673,21 +673,21 @@ pub fn node(
             api: PublicService {
                 name: api_name.clone(),
                 bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), api_port).into(),
-                address: Address::hostname(format!("{api_name}"), api_port),
+                address: Address::hostname(api_name.to_string(), api_port),
-                external_address: Address::hostname(format!("{hostname}"), api_port),
+                external_address: Address::hostname(hostname.to_string(), api_port),
             },
             drv: PublicService {
                 name: drv_name.clone(),
                 bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), drv_port).into(),
-                address: Address::hostname(format!("{drv_name}"), drv_port),
+                address: Address::hostname(drv_name.to_string(), drv_port),
-                external_address: Address::hostname(format!("{hostname}"), drv_port),
+                external_address: Address::hostname(hostname.to_string(), drv_port),
             },
             plr: PrivateService {
                 name: plr_name.clone(),
                 bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), plr_port).into(),
-                address: Address::hostname(format!("{plr_name}"), plr_port),
+                address: Address::hostname(plr_name.to_string(), plr_port),
             },
             prx: if let Some(address) = external_proxy {
                 PrivateOrExternalService::External(ExternalService { address })
@@ -695,7 +695,7 @@ pub fn node(
                 PrivateOrExternalService::Private(PrivateService {
                     name: prx_name.clone(),
                     bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), prx_port).into(),
-                    address: Address::hostname(format!("{prx_name}"), prx_port),
+                    address: Address::hostname(prx_name.to_string(), prx_port),
                 })
             },
@@ -806,16 +806,16 @@ pub fn node(
             reg: PublicService {
                 name: reg_name.clone(),
                 bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), reg_port).into(),
-                address: Address::hostname(format!("{reg_name}"), reg_port),
+                address: Address::hostname(reg_name.to_string(), reg_port),
-                external_address: Address::hostname(format!("{hostname}"), reg_port),
+                external_address: Address::hostname(hostname.to_string(), reg_port),
             },
             job: PublicService {
                 name: job_name.clone(),
                 bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), job_port).into(),
-                address: Address::hostname(format!("{job_name}"), job_port),
+                address: Address::hostname(job_name.to_string(), job_port),
-                external_address: Address::hostname(format!("{hostname}"), job_port),
+                external_address: Address::hostname(hostname.to_string(), job_port),
             },
             chk: DoublePrivateService {
                 name: chk_name.clone(),
@@ -829,7 +829,7 @@ pub fn node(
                 PrivateOrExternalService::Private(PrivateService {
                     name: prx_name.clone(),
                     bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), prx_port).into(),
-                    address: Address::hostname(format!("{prx_name}"), prx_port),
+                    address: Address::hostname(prx_name.to_string(), prx_port),
                 })
             },
         },
@@ -867,9 +867,9 @@ pub fn node(
             prx: PublicService {
                 name: prx_name.clone(),
                 bind: SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), prx_port).into(),
-                address: Address::hostname(format!("{prx_name}"),
prx_port), + address: Address::hostname(prx_name.to_string(), prx_port), - external_address: Address::hostname(format!("{hostname}"), prx_port), + external_address: Address::hostname(hostname.to_string(), prx_port), }, }, }), @@ -1140,8 +1140,8 @@ pub fn infra( for loc in locations { locs.insert(loc.0.clone(), InfraLocation { name: beautify_id(loc.0), - registry: Address::hostname(format!("{}", loc.1), 50051), - delegate: Address::hostname(format!("{}", loc.1), 50052), + registry: Address::hostname(loc.1.to_string(), 50051), + delegate: Address::hostname(loc.1.to_string(), 50052), }); } diff --git a/brane-job/src/worker.rs b/brane-job/src/worker.rs index 1e6ab97b..85829ced 100644 --- a/brane-job/src/worker.rs +++ b/brane-job/src/worker.rs @@ -593,6 +593,9 @@ async fn assert_task_permission( Ok(true) }, + // NOTE: Clippy is under the impression I'm OK with calling `info!()` multiple times. + // I'm not ~ this should be a single log statement. + #[allow(clippy::format_collect)] ReasonerResponse::Violated(reasons) => { info!("Checker DENIED execution of task {}{}", call, reasons.into_iter().map(|r| format!(" - {r}\n")).collect::()); Ok(false) @@ -722,6 +725,9 @@ async fn check_workflow_or_task( Ok(Response::new(Prost::>>::new(CheckResponse { verdict: ReasonerResponse::Success }))) }, + // NOTE: Clippy is under the impression I'm OK with calling `info!()` multiple times. + // I'm not ~ this should be a single log statement. + #[allow(clippy::format_collect)] ReasonerResponse::Violated(reasons) => { info!("Checker DENIED execution of {}{}", request.what(), reasons.iter().map(|r| format!(" - {r}\n")).collect::()); Ok(Response::new(Prost::>>::new(CheckResponse { verdict: ReasonerResponse::Violated(reasons) }))) diff --git a/brane-tsk/src/docker.rs b/brane-tsk/src/docker.rs index 9627f858..0e6c2d12 100644 --- a/brane-tsk/src/docker.rs +++ b/brane-tsk/src/docker.rs @@ -738,12 +738,10 @@ async fn read_digest(docker: &Docker, image: impl AsRef) -> Result
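The recurring `format!("{name}")` to `name.to_string()` rewrites in brane-cli/src/instance.rs and brane-ctl/src/generate.rs all address the same pattern (Clippy's `useless_format`): a format string that contains nothing but a single interpolated value is just a slower, noisier way to write `.to_string()`. A minimal illustration with a hypothetical `hostname`:

    fn main() {
        let hostname = "node1.example.org";

        // Flagged by `clippy::useless_format`: the format string adds nothing around the value.
        let a: String = format!("{hostname}");

        // Preferred: same result, clearer intent.
        let b: String = hostname.to_string();

        assert_eq!(a, b);
    }

The `#[allow(clippy::format_collect)]` additions in brane-job/src/worker.rs go the other way: there the lint would prefer `write!`-ing into a single `String` over mapping `format!` and collecting, but, as the NOTE comments explain, the author accepts that cost to keep each verdict in a single `info!` statement.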