diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e0becc168..76619563d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -75,6 +75,37 @@ jobs: run: | RUSTDOCFLAGS='--deny warnings' cargo doc --no-deps + frontend_tests: + name: frontend tests + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + + - name: Setup NodeJS + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "pnpm" + + - name: Install Rust toolchain from file + uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + # Don't override flags in cargo config files. + rustflags: "" + + - name: Build frontend + run: | + pnpm install + pnpm --filter ./packages/frontend run build -- --mode development + + - name: Run frontend tests + run: | + pnpm --filter ./packages/frontend run test:no-backend + npm_checks: name: npm checks runs-on: ubuntu-latest @@ -98,3 +129,26 @@ jobs: - name: Format/linting/import sorting run: | pnpm --filter "./packages/*" run ci + + build_nixos_system: + name: build NixOS system + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install Nix + uses: cachix/install-nix-action@v25 + with: + nix_path: nixpkgs=channel:25.05 + + - name: Configure Cachix + uses: cachix/cachix-action@v14 + with: + name: catcolab-jmoggr + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + + - name: Build the NixOS system for catcolab-next + run: nix build .#nixosConfigurations.catcolab-next.config.system.build.toplevel + diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index a777844ea..42d02f205 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -216,21 +216,36 @@ jobs: name: Deploy backend to AWS runs-on: ubuntu-latest if: github.event_name != 'pull_request' && github.ref_name == 'main' - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Set up SSH key - 
make recovery slightly less awful
Keeping the frontend dependencies
+ # checks.x86_64-linux.integrationTests = import ./infrastructure/tests/integration.nix { + # inherit + # nixpkgs + # inputs + # self + # linuxSystem + # ; + # rustToolchain = rustToolchainLinux; + # }; }; } diff --git a/infrastructure/hosts/catcolab-next/default.nix b/infrastructure/hosts/catcolab-next/default.nix index 316fb2455..905453010 100644 --- a/infrastructure/hosts/catcolab-next/default.nix +++ b/infrastructure/hosts/catcolab-next/default.nix @@ -25,7 +25,7 @@ in }; catcolabSecrets = { - file = ../../secrets/.env.next.age; + file = ../../secrets/env.next.age; mode = "400"; owner = "catcolab"; }; diff --git a/infrastructure/hosts/catcolab/default.nix b/infrastructure/hosts/catcolab/default.nix index 3bc72b391..ecbeca4c9 100644 --- a/infrastructure/hosts/catcolab/default.nix +++ b/infrastructure/hosts/catcolab/default.nix @@ -22,7 +22,7 @@ in owner = "catcolab"; }; catcolabSecrets = { - file = ../../secrets/.env.prod.age; + file = ../../secrets/env.prod.age; owner = "catcolab"; }; }; diff --git a/infrastructure/secrets/.env.next.age b/infrastructure/secrets/env.next.age similarity index 100% rename from infrastructure/secrets/.env.next.age rename to infrastructure/secrets/env.next.age diff --git a/infrastructure/secrets/.env.prod.age b/infrastructure/secrets/env.prod.age similarity index 100% rename from infrastructure/secrets/.env.prod.age rename to infrastructure/secrets/env.prod.age diff --git a/infrastructure/secrets/secrets.nix b/infrastructure/secrets/secrets.nix index b24f83c20..e610699e3 100644 --- a/infrastructure/secrets/secrets.nix +++ b/infrastructure/secrets/secrets.nix @@ -7,14 +7,14 @@ let catcolab-next-deployuser = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM7AYg1fZM0zMxb/BuZTSwK4O3ycUIHruApr1tKoO8nJ deployuser@next.catcolab.org"; in builtins.mapAttrs (_: publicKeys: { inherit publicKeys; }) ({ - ".env.next.age" = [ + "env.next.age" = [ catcolab-next owen epatters jmoggr catcolab-next-deployuser ]; - ".env.prod.age" = [ + "env.prod.age" = [ 
from systemd's perspective
b/packages/automerge-doc-server/src/server.ts @@ -15,11 +15,21 @@ import type { Pool as PoolType } from "pg"; import { PostgresStorageAdapter } from "./postgres_storage_adapter.js"; import type { NewDocSocketResponse, StartListeningSocketResponse } from "./types.js"; import type { SocketIOHandlers } from "./socket.js"; +import { serializeError } from "./socket.js"; import jsonpatch from "fast-json-patch"; // Load environment variables from .env dotenv.config(); +/** Attempt to migrate a document, returning the migrated document or an error message. */ +function migrateDocument(doc: unknown): { Ok: any } | { Err: string } { + try { + return { Ok: notbookTypes.migrateDocument(doc) }; + } catch (e) { + return { Err: `Failed to migrate document: ${serializeError(e)}` }; + } +} + export class AutomergeServer implements SocketIOHandlers { private docMap: Map>; @@ -66,7 +76,12 @@ export class AutomergeServer implements SocketIOHandlers { } async createDoc(content: unknown): Promise { - const handle = this.repo.create(content); + const migrateResult = migrateDocument(content); + if ("Err" in migrateResult) { + return migrateResult; + } + + const handle = this.repo.create(migrateResult.Ok); if (!handle) { return { Err: "Failed to create a new document", @@ -133,14 +148,18 @@ export class AutomergeServer implements SocketIOHandlers { // // XXX: frontend/src/api/document.ts needs to be kept up to date with this const docBefore = handle.doc(); - const docAfter = notbookTypes.migrateDocument(docBefore); + const migrateResult = migrateDocument(docBefore); + if ("Err" in migrateResult) { + return migrateResult; + } + const docAfter = migrateResult.Ok; + if ((docBefore as any).version !== docAfter.version) { const patches = jsonpatch.compare(docBefore as any, docAfter); handle.change((doc: any) => { jsonpatch.applyPatch(doc, patches); }); } - this.docMap.set(refId, handle); return { Ok: null }; diff --git a/packages/automerge-doc-server/src/socket.ts 
b/packages/automerge-doc-server/src/socket.ts index 67f8d023b..8fb2ef15a 100644 --- a/packages/automerge-doc-server/src/socket.ts +++ b/packages/automerge-doc-server/src/socket.ts @@ -1,18 +1,39 @@ import { type Socket, io } from "socket.io-client"; -import type { JsonValue, RefContent } from "../../backend/pkg/src/index.ts"; +import type { JsonValue } from "../../backend/pkg/src/index.ts"; import type { NewDocSocketResponse, StartListeningSocketResponse } from "./types.js"; +/** Serialize an error to a meaningful string message. */ +export function serializeError(error: unknown): string { + // unwraps in rust wasm throw strings + if (typeof error === "string") { + return error; + } else if (error instanceof Error) { + return error.message; + } else { + console.warn("Serializing an invalid error object", error); + if (error && typeof error === "object" && "message" in error) { + return String(error.message); + } else { + try { + return String(error); + } catch { + return "Unknown error (failed to serialize)"; + } + } + } +} + /** Messages handled by the `SocketServer`. */ export type SocketIOHandlers = { - createDoc: (data: RefContent) => Promise; + createDoc: (content: unknown) => Promise; cloneDoc: (docId: string) => Promise; startListening: (refId: string, docId: string) => Promise; }; /** Messages emitted by the `SocketServer`. 
*/ export type Requests = { - autosave: (data: RefContent) => void; + autosave: (data: unknown) => void; }; // Convert the type of each property from `(...args) => ret` to `(...args, (ret) => void) => void` to @@ -38,7 +59,18 @@ function registerHandler< const result = handler.apply(automergeServer, args); // Wrap result in a promise so this doesn't blow up when a sync handler is added in the future - Promise.resolve(result).then(callback); + Promise.resolve(result) + .then(callback) + .catch((error) => { + console.error(`Error in socket handler '${String(event)}':`, error); + console.error("Arguments:", JSON.stringify(args, null, 2)); + + if (error instanceof Error) { + console.error("Stack trace:", error.stack); + } + + callback({ Err: serializeError(error) }); + }); }; socket.on(event as any, listener); diff --git a/packages/backend/Cargo.toml b/packages/backend/Cargo.toml index d1c85513a..94a392c98 100644 --- a/packages/backend/Cargo.toml +++ b/packages/backend/Cargo.toml @@ -16,6 +16,7 @@ http = "1.1.0" hyper = { version = "1.4.1", features = ["server"] } jsonrpsee = "0.24.6" jsonrpsee-server = "0.24.6" +notebook-types = { version = "0.1.0", path = "../notebook-types" } qubit = "0.10.2" regex = "1.11.1" sd-notify = "0.4.5" diff --git a/packages/backend/default.nix b/packages/backend/default.nix index 5af7fffb6..8ea50d030 100644 --- a/packages/backend/default.nix +++ b/packages/backend/default.nix @@ -31,6 +31,7 @@ craneLib.buildPackage { ../../Cargo.toml ../../Cargo.lock (craneLib.fileset.commonCargoSources ./.) + (craneLib.fileset.commonCargoSources ../notebook-types) ./.sqlx ]; }; diff --git a/packages/backend/pkg/src/NewPermissions.ts b/packages/backend/pkg/src/NewPermissions.ts index 436d26ebe..8ec15a233 100644 --- a/packages/backend/pkg/src/NewPermissions.ts +++ b/packages/backend/pkg/src/NewPermissions.ts @@ -9,8 +9,9 @@ export type NewPermissions = { * Base permission level for any person, logged in or not. 
*/ anyone: PermissionLevel | null, -/** Permission levels for users. - - A mapping from user IDs to permission levels. - */ +/** + * Permission levels for users. + * + * A mapping from user IDs to permission levels. + */ users: { [key in string]?: PermissionLevel }, }; diff --git a/packages/backend/pkg/src/Permissions.ts b/packages/backend/pkg/src/Permissions.ts index 276d763a0..c51530d17 100644 --- a/packages/backend/pkg/src/Permissions.ts +++ b/packages/backend/pkg/src/Permissions.ts @@ -14,8 +14,9 @@ anyone: PermissionLevel | null, * Permission level for the current user. */ user: PermissionLevel | null, -/** Permission levels for all other users. - - Only owners of the document have access to this information. - */ +/** + * Permission levels for all other users. + * + * Only owners of the document have access to this information. + */ users: Array | null, }; diff --git a/packages/backend/pkg/src/RefStub.ts b/packages/backend/pkg/src/RefStub.ts index 8f049c144..646e21be6 100644 --- a/packages/backend/pkg/src/RefStub.ts +++ b/packages/backend/pkg/src/RefStub.ts @@ -3,8 +3,7 @@ import type { PermissionLevel } from "./PermissionLevel"; import type { UserSummary } from "./UserSummary"; /** - * A subset of user relevant information about a ref. Used for showing - * users information on a variety of refs without having to load whole - * refs. + * A subset of user relevant information about a ref. Used for showing users + * information on a variety of refs without having to load whole refs. */ export type RefStub = { name: string, typeName: string, refId: string, permissionLevel: PermissionLevel, owner: UserSummary | null, createdAt: string, }; diff --git a/packages/backend/pkg/src/UserSummary.ts b/packages/backend/pkg/src/UserSummary.ts index 7c176c563..bf594730a 100644 --- a/packages/backend/pkg/src/UserSummary.ts +++ b/packages/backend/pkg/src/UserSummary.ts @@ -1,8 +1,9 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). 
Do not edit this file manually. -/** Summary of a user. - -The minimal information needed to uniquely identify a user and display the user -in human-readable form. +/** + * Summary of a user. + * + * The minimal information needed to uniquely identify a user and display the user + * in human-readable form. */ export type UserSummary = { id: string, username: string | null, displayName: string | null, }; diff --git a/packages/backend/pkg/src/index.ts b/packages/backend/pkg/src/index.ts index bd366e788..c6b4810ee 100644 --- a/packages/backend/pkg/src/index.ts +++ b/packages/backend/pkg/src/index.ts @@ -21,8 +21,8 @@ import type { NewPermissions } from "./NewPermissions.ts"; import type { UserSummary } from "./UserSummary.ts"; import type { UsernameStatus } from "./UsernameStatus.ts"; import type { UserProfile } from "./UserProfile.ts"; -import type { Paginated } from "./Paginated.ts"; import type { RefStub } from "./RefStub.ts"; +import type { Paginated } from "./Paginated.ts"; import type { RefQueryParams } from "./RefQueryParams.ts"; export type { RpcResult } from "./RpcResult.ts"; @@ -36,8 +36,8 @@ export type { NewPermissions } from "./NewPermissions.ts"; export type { UserSummary } from "./UserSummary.ts"; export type { UsernameStatus } from "./UsernameStatus.ts"; export type { UserProfile } from "./UserProfile.ts"; -export type { Paginated } from "./Paginated.ts"; export type { RefStub } from "./RefStub.ts"; +export type { Paginated } from "./Paginated.ts"; export type { RefQueryParams } from "./RefQueryParams.ts"; export type QubitServer = { new_ref: Mutation<[content: JsonValue, ], RpcResult>, get_doc: Query<[ref_id: string, ], RpcResult>, head_snapshot: Query<[ref_id: string, ], RpcResult>, create_snapshot: Mutation<[ref_id: string, ], RpcResult>, get_permissions: Query<[ref_id: string, ], RpcResult>, set_permissions: Mutation<[ref_id: string, new: NewPermissions, ], RpcResult>, validate_session: Query<[], RpcResult>, sign_up_or_sign_in: Mutation<[], 
RpcResult>, user_by_username: Query<[username: string, ], RpcResult>, username_status: Query<[username: string, ], RpcResult>, get_active_user_profile: Query<[], RpcResult>, set_active_user_profile: Mutation<[user: UserProfile, ], RpcResult>, search_ref_stubs: Query<[query_params: RefQueryParams, ], RpcResult>> }; \ No newline at end of file diff --git a/packages/backend/src/document.rs b/packages/backend/src/document.rs index 02933dd60..9e902721c 100644 --- a/packages/backend/src/document.rs +++ b/packages/backend/src/document.rs @@ -12,6 +12,10 @@ use crate::{auth::PermissionLevel, user::UserSummary}; /// Creates a new document ref with initial content. pub async fn new_ref(ctx: AppCtx, content: Value) -> Result { + // Validate document structure by attempting to deserialize it + let _validated_doc: notebook_types::VersionedDocument = serde_json::from_value(content.clone()) + .map_err(|e| AppError::Invalid(format!("Failed to parse document: {}", e)))?; + let ref_id = Uuid::now_v7(); // If the document is created but the db transaction doesn't complete, then the document will be diff --git a/packages/catlog-wasm/src/model.rs b/packages/catlog-wasm/src/model.rs index 9e9494f96..edb172486 100644 --- a/packages/catlog-wasm/src/model.rs +++ b/packages/catlog-wasm/src/model.rs @@ -1,5 +1,8 @@ //! Wasm bindings for models of a double theory. +use std::collections::HashMap; +use std::rc::Rc; + use all_the_same::all_the_same; use derive_more::{From, TryInto}; use nonempty::NonEmpty; @@ -258,19 +261,46 @@ impl CanQuote for Quoter { /// A box containing a model of a double theory of any kind. /// /// See [`DblTheoryBox`] for motivation. -#[allow(clippy::large_enum_variant)] -#[derive(From, TryInto)] -#[try_into(ref, ref_mut)] +#[derive(Clone, From, TryInto)] +#[try_into(ref)] pub enum DblModelBox { /// A model of a discrete double theory. - Discrete(dbl_model::DiscreteDblModel), + Discrete(Rc), /// A model of a discrete tabulator theory. 
- DiscreteTab(dbl_model::DiscreteTabModel), + DiscreteTab(Rc), /// A model of a modal double theory. - Modal(dbl_model::ModalDblModel), + Modal(Rc), +} + +impl From for DblModelBox { + fn from(value: dbl_model::DiscreteDblModel) -> Self { + Rc::new(value).into() + } +} +impl From for DblModelBox { + fn from(value: dbl_model::DiscreteTabModel) -> Self { + Rc::new(value).into() + } +} +impl From for DblModelBox { + fn from(value: dbl_model::ModalDblModel) -> Self { + Rc::new(value).into() + } +} + +impl DblModelBox { + /// Constructs an empty boxed model of a double theory. + pub fn new(theory: &DblTheory) -> Self { + match &theory.0 { + DblTheoryBox::Discrete(th) => dbl_model::DiscreteDblModel::new(th.clone()).into(), + DblTheoryBox::DiscreteTab(th) => dbl_model::DiscreteTabModel::new(th.clone()).into(), + DblTheoryBox::Modal(th) => dbl_model::ModalDblModel::new(th.clone()).into(), + } + } } /// Wasm binding of a model of a double theory. +#[derive(Clone)] #[wasm_bindgen] pub struct DblModel { /// The boxed underlying model. @@ -281,43 +311,40 @@ pub struct DblModel { } impl DblModel { - /// Constructs a new model of a double theory. + /// Constructs an empty model of a double theory. pub fn new(theory: &DblTheory) -> Self { - let model = match &theory.0 { - DblTheoryBox::Discrete(th) => dbl_model::DiscreteDblModel::new(th.clone()).into(), - DblTheoryBox::DiscreteTab(th) => dbl_model::DiscreteTabModel::new(th.clone()).into(), - DblTheoryBox::Modal(th) => dbl_model::ModalDblModel::new(th.clone()).into(), - }; Self { - model, + model: DblModelBox::new(theory), ob_namespace: Namespace::new_for_uuid(), mor_namespace: Namespace::new_for_uuid(), } } - /// Tries to get a model of a discrete theory. - pub fn discrete(&self) -> Result<&dbl_model::DiscreteDblModel, String> { - (&self.model) - .try_into() - .map_err(|_| "Model should be of a discrete theory".into()) + /// Replaces the boxed model while preserving the namespaces. 
+ pub fn replace_box(&self, model: DblModelBox) -> Self { + Self { + model, + ob_namespace: self.ob_namespace.clone(), + mor_namespace: self.mor_namespace.clone(), + } } - /// Tries to get a model of a discrete theory, by mutable reference. - pub fn discrete_mut(&mut self) -> Result<&mut dbl_model::DiscreteDblModel, String> { - (&mut self.model) + /// Tries to get a model of a discrete theory. + pub fn discrete(&self) -> Result<&Rc, String> { + (&self.model) .try_into() .map_err(|_| "Model should be of a discrete theory".into()) } /// Tries to get a model of a discrete tabulator theory. - pub fn discrete_tab(&self) -> Result<&dbl_model::DiscreteTabModel, String> { + pub fn discrete_tab(&self) -> Result<&Rc, String> { (&self.model) .try_into() .map_err(|_| "Model should be of a discrete tabulator theory".into()) } /// Tries to get a model of a modal theory. - pub fn modal(&self) -> Result<&dbl_model::ModalDblModel, String> { + pub fn modal(&self) -> Result<&Rc, String> { (&self.model).try_into().map_err(|_| "Model should be of a modal theory".into()) } @@ -325,6 +352,7 @@ impl DblModel { pub fn add_ob(&mut self, decl: &ObDecl) -> Result<(), String> { all_the_same!(match &mut self.model { DblModelBox::[Discrete, DiscreteTab, Modal](model) => { + let model = Rc::make_mut(model); let ob_type = Elaborator.elab(&decl.ob_type)?; model.add_ob(decl.id.into(), ob_type); } @@ -339,6 +367,7 @@ impl DblModel { pub fn add_mor(&mut self, decl: &MorDecl) -> Result<(), String> { all_the_same!(match &mut self.model { DblModelBox::[Discrete, DiscreteTab, Modal](model) => { + let model = Rc::make_mut(model); let mor_type = Elaborator.elab(&decl.mor_type)?; model.make_mor(decl.id.into(), mor_type); if let Some(dom) = decl.dom.as_ref().map(|ob| Elaborator.elab(ob)).transpose()? { @@ -533,17 +562,50 @@ pub fn collect_product(ob: Ob) -> Result, String> { Ok(vec.into_iter().map(|ob| Quoter.quote(&ob)).collect()) } +/// A named collection of models of double theories. 
+#[derive(Default)] +#[wasm_bindgen] +pub struct DblModelMap(#[wasm_bindgen(skip)] pub HashMap); + +#[wasm_bindgen] +impl DblModelMap { + /// Constructs an empty collection of models. + #[wasm_bindgen(constructor)] + pub fn new() -> Self { + Default::default() + } + + /// Returns whether the collection contains a model with the given name. + #[wasm_bindgen(js_name = "has")] + pub fn contains_key(&self, id: &str) -> bool { + self.0.contains_key(id) + } + + /// Inserts a model with the given name. + #[wasm_bindgen(js_name = "set")] + pub fn insert(&mut self, id: String, model: &DblModel) { + self.0.insert(id, model.clone()); + } +} + /// Elaborates a model defined by a notebook into a catlog model. #[wasm_bindgen(js_name = "elaborateModel")] pub fn elaborate_model( - judgments: Vec, + notebook: &ModelNotebook, + instantiated: &DblModelMap, theory: &DblTheory, ) -> Result { let mut model = DblModel::new(theory); - for judgment in judgments { + for judgment in notebook.0.formal_content() { match judgment { - ModelJudgment::Object(decl) => model.add_ob(&decl)?, - ModelJudgment::Morphism(decl) => model.add_mor(&decl)?, + ModelJudgment::Object(decl) => model.add_ob(decl)?, + ModelJudgment::Morphism(decl) => model.add_mor(decl)?, + ModelJudgment::Instantiation(inst) => { + if let Some(link) = &inst.model { + assert!(instantiated.0.contains_key(&link.stable_ref.id)); + } + // FIXME: Do something with the instantiation. + } } } Ok(model) diff --git a/packages/catlog-wasm/src/model_diagram.rs b/packages/catlog-wasm/src/model_diagram.rs index 40ba19ed9..d626c58df 100644 --- a/packages/catlog-wasm/src/model_diagram.rs +++ b/packages/catlog-wasm/src/model_diagram.rs @@ -1,5 +1,7 @@ //! Wasm bindings for diagrams in models of a double theory. 
+use std::rc::Rc; + use all_the_same::all_the_same; use derive_more::From; use serde::{Deserialize, Serialize}; @@ -7,6 +9,7 @@ use tsify::Tsify; use uuid::Uuid; use wasm_bindgen::prelude::*; +use catlog::dbl::discrete::model_diagram::InvalidDiscreteDblModelDiagram; use catlog::dbl::model::{DblModel as _, DiscreteDblModel, FgDblModel, MutDblModel}; use catlog::dbl::model_diagram as diagram; use catlog::dbl::model_morphism::DiscreteDblModelMapping; @@ -16,10 +19,10 @@ use catlog::zero::{ }; use notebook_types::current::*; -use super::model::{DblModel, DblModelBox}; +use super::model::DblModel; use super::notation::*; use super::result::JsResult; -use super::theory::DblTheory; +use super::theory::{DblTheory, DblTheoryBox}; /// A box containing a diagram in a model of a double theory. #[derive(From)] @@ -41,10 +44,10 @@ pub struct DblModelDiagram { impl DblModelDiagram { /// Creates an empty diagram for the given theory. pub fn new(theory: &DblTheory) -> Self { - let model = DblModel::new(theory); - let diagram = match model.model { - DblModelBox::Discrete(model) => { + let diagram = match &theory.0 { + DblTheoryBox::Discrete(theory) => { let mapping = Default::default(); + let model = DiscreteDblModel::new(theory.clone()); diagram::DblModelDiagram(mapping, model).into() } _ => panic!("Diagrams only implemented for discrete double theories"), @@ -281,7 +284,7 @@ impl DblModelDiagram { pub fn infer_missing_from(&mut self, model: &DblModel) -> Result<(), String> { all_the_same!(match &mut self.diagram { DblModelDiagramBox::[Discrete](diagram) => { - let model = (&model.model).try_into().map_err( + let model: &Rc<_> = (&model.model).try_into().map_err( |_| "Type of model should match type of diagram")?; diagram.infer_missing_from(model); } @@ -304,7 +307,7 @@ impl DblModelDiagram { pub fn validate_in(&self, model: &DblModel) -> Result { let result = all_the_same!(match &self.diagram { DblModelDiagramBox::[Discrete](diagram) => { - let model = 
(&model.model).try_into().map_err( + let model: &Rc<_> = (&model.model).try_into().map_err( |_| "Type of model should match type of diagram")?; diagram.validate_in(model) } @@ -325,9 +328,7 @@ fn expect_single_uuid(name: &QualifiedName) -> Uuid { /// Result of validating a diagram in a model. #[derive(Serialize, Deserialize, Tsify)] #[tsify(into_wasm_abi, from_wasm_abi)] -pub struct ModelDiagramValidationResult( - pub JsResult<(), Vec>, -); +pub struct ModelDiagramValidationResult(pub JsResult<(), Vec>); /// Elaborates a diagram defined by a notebook into a catlog diagram. #[wasm_bindgen(js_name = "elaborateDiagram")] diff --git a/packages/catlog-wasm/src/theories.rs b/packages/catlog-wasm/src/theories.rs index 8ac315f10..e3b2b01fc 100644 --- a/packages/catlog-wasm/src/theories.rs +++ b/packages/catlog-wasm/src/theories.rs @@ -51,13 +51,13 @@ impl ThCategory { /// Sigma migrates a category to a schema. #[wasm_bindgen(js_name = "toSchema")] - pub fn to_schema(mut model: DblModel, th_schema: &DblTheory) -> Result { - let th = th_schema.discrete()?; - model.discrete_mut()?.push_forward( + pub fn to_schema(boxed: &DblModel, th_schema: &DblTheory) -> Result { + let (th, mut model) = (th_schema.discrete()?, boxed.discrete()?.as_ref().clone()); + model.push_forward( &theory_morphisms::th_category_to_schema().functor_into(&th.0), th.clone(), ); - Ok(model) + Ok(boxed.replace_box(model.into())) } } @@ -79,13 +79,13 @@ impl ThSchema { /// Sigma migrates a schema to a category. 
#[wasm_bindgen(js_name = "toCategory")] - pub fn to_category(mut model: DblModel, th_category: &DblTheory) -> Result { - let th = th_category.discrete()?; - model.discrete_mut()?.push_forward( + pub fn to_category(boxed: &DblModel, th_category: &DblTheory) -> Result { + let (th, mut model) = (th_category.discrete()?, boxed.discrete()?.as_ref().clone()); + model.push_forward( &theory_morphisms::th_schema_to_category().functor_into(&th.0), th.clone(), ); - Ok(model) + Ok(boxed.replace_box(model.into())) } } @@ -226,14 +226,14 @@ impl ThDelayableSignedCategory { /// Sigma migrates a delayable signed category to a signed category. #[wasm_bindgen(js_name = "toSignedCategory")] - pub fn to_signed_category(mut model: DblModel, th: &DblTheory) -> Result { - let th = th.discrete()?; - model.discrete_mut()?.push_forward( + pub fn to_signed_category(boxed: &DblModel, th: &DblTheory) -> Result { + let (th, mut model) = (th.discrete()?, boxed.discrete()?.as_ref().clone()); + model.push_forward( &theory_morphisms::th_delayable_signed_category_to_signed_category() .functor_into(&th.0), th.clone(), ); - Ok(model) + Ok(boxed.replace_box(model.into())) } } diff --git a/packages/catlog/src/dbl/discrete/model.rs b/packages/catlog/src/dbl/discrete/model.rs index f71749f92..7db796075 100644 --- a/packages/catlog/src/dbl/discrete/model.rs +++ b/packages/catlog/src/dbl/discrete/model.rs @@ -149,6 +149,10 @@ impl Category for DiscreteDblModel { fn compose(&self, path: Path) -> Self::Mor { self.category.compose(path) } + + fn morphisms_are_equal(&self, p1: Self::Mor, p2: Self::Mor) -> bool { + self.category.morphisms_are_equal(p1, p2) + } } impl FgCategory for DiscreteDblModel { diff --git a/packages/catlog/src/dbl/model_diagram.rs b/packages/catlog/src/dbl/model_diagram.rs index fd3d5af48..8b2572856 100644 --- a/packages/catlog/src/dbl/model_diagram.rs +++ b/packages/catlog/src/dbl/model_diagram.rs @@ -15,13 +15,28 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "serde-wasm")] use 
because +/// the morphism data
/// Consider the comma category D'j' ↓ D:
/// - Its objects are pairs (j ∈ J, D'j' → Dj ∈ Hom 𝒞)
/// - Its morphisms are triples (f: j₁ → j₂, s: D'j → Dj₁, t: D'j → Dj₂)
///   such that s;Df = t
///
/// π₀ is a functor Cat → Set sending each category to its set of connected
/// components.
///
/// π₀(D'j' ↓ D), then, is a set with elements which are pairs
/// (j ∈ J, D'j' → Dj ∈ Hom 𝒞), but we regard two as equal if there exists a
/// zig-zag of morphisms in D'j' ↓ D.
///
/// We are interested in Hom(D,D'), the morphisms between the instances
/// presented by D and D'. This is equal to lim(π₀(D'j' ↓ D)).
/// The solutions to the limit problem are a choice, for each j ∈ J, of some
/// object of π₀(D'j' ↓ D), such that for all q: j₁ → j₂, there is a morphism
/// α(j₁) in 𝒞 such that ... (TODO: finish stating the compatibility condition)
///
/// This struct borrows its data to perform validation. The domain and codomain are
/// assumed to be valid models of double theories. If that is in question, the
/// models should be validated *before* validating this object.
pub struct InstanceMorphism<'a>(
    /// Component assignment: each object j of the domain shape is sent to a
    /// pair (j', f) of a codomain-shape object j' and a morphism
    /// f: D'(j') → D(j) in the underlying category 𝒞.
    pub &'a HashColumn<QualifiedName, (QualifiedName, QualifiedPath)>,
    /// The domain diagram D.
    pub &'a DiscreteDblModelDiagram,
    /// The codomain diagram D'.
    pub &'a DiscreteDblModelDiagram,
);

/// An instance morphism must provide a component for each element of the domain.
/// MissingComponent marks if one is missing, whereas IllSpecifiedComponent marks
/// that the component is improperly specified somehow.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(tag = "tag", content = "err"))]
#[cfg_attr(feature = "serde-wasm", derive(Tsify))]
#[cfg_attr(feature = "serde-wasm", tsify(into_wasm_abi, from_wasm_abi))]
pub enum InvalidInstanceMorphism {
    /// No component was supplied for this domain-shape object.
    MissingComponent(QualifiedName),

    /// A component was supplied for this object but is ill-specified; the
    /// string carries a human-readable description of the problem.
+ IllSpecifiedComponent(String, QualifiedName), + + /// Mapping underlying the diagram is unnatural. (f: a->b, Ξ±(a), Ξ±(b)) + UnnaturalComponent(QualifiedName), +} + +impl DiscreteDblModelDiagram { + /// WARNING: this code ignores the ob/mor types in model.generating_graph + /// this could lead to spurious morphisms - we really should be making sure + /// we respect the coloring in some to-be-specified way. + /// + /// Because morphisms within the shape J of a diagram J β†’ π’ž are representing + /// equalities, they can be traversed in either direction. The natural + /// notion of the same object c in π’ž be presented in two different ways, + /// (j1,D(j1)β†’c) and (j2,D(j2)β†’c), involves traversing these morphisms in + /// either direction. + /// + /// Given a diagram D: J β†’ π’ž, and a cospan of π’ž morphisms src: D(j1)β†’c + /// and tgt: D(j2)β†’c, decide whether *some* zigzag path of morphisms in D/c + /// from src to tgt. + /// + /// There are some concerns about the accuracy of this approach when π’ž has + /// loops, as it is not possible to fully enumerate all paths of morphism + /// generators between two objects in π’ž. Thus there may be false negatives. + fn zigzag( + &self, + j1: QualifiedName, + j2: QualifiedName, + src: QualifiedPath, + tgt: QualifiedPath, + model: &DiscreteDblModel, + len: Option, + ) -> bool { + let f = self.0.functor_into(model); + + // Our worklist represents the zigzags out of j1 that are in the same + // connected component. We do not remember which zigzag in J it is, just + // its domain object in J and the morphism's image in π’ž. + // So, each element in the work list is an object j (which is connected + // to j1 via *some* sequence of morphisms in J) and a morphism D(j) β†’ c + let mut worklist: Vec<(QualifiedName, QualifiedPath)> = vec![(j1.clone(), src.clone())]; + + // If we ever come across a pair (j, D(j) β†’ c) we have seen before, + // then we do not need to add it to the worklist. 
Determining whether or + // not src and tgt are in the same connected component does not need us + // to track which zig zag in particular led us to this pair, so there is + // no need to ever have cycles in our tree search. + let mut seen: HashSet<(QualifiedName, QualifiedPath)> = HashSet::new(); + + let jg = self.1.generating_graph(); + let g = model.generating_graph(); + + // This is the distinguished object in π’ž that we care about. We are only + // interested in π’ž/c for the purposes of determining whether src and tgt + // are mutually reachable. + let c = src.tgt(g); + + let dj1 = f.ob_generator_map().get(&j1).unwrap(); + let dj2 = f.ob_generator_map().get(&j2).unwrap(); + + // Confirm that src and tgt are a cospan D(j1) β†’ c ← D(j2) + if *dj1 != src.src(g) { + panic!("Bad input D({j1:?}) = {dj1:?} != src({src:?}") + } else if *dj2 != tgt.src(g) { + panic!("Bad input D({j2:?}) = {dj2:?} != src({tgt:?}") + } else if c != tgt.tgt(g) { + panic!("Bad input tgt({tgt:?})!={c:?}") + } + + // Process worklist until it is empty. This will terminate because `len` + // restricts us to looking at only a finite subset of the morphisms in + // π’ž/c, and there are only a finite number of objects in J, so there are + // a finite number of (j, D(j) β†’ c) pairs (which we will never repeat) + while let Some((j, m)) = worklist.pop() { + // Check if this pair is the tgt we have been trying to reach + if model.morphisms_are_equal(m.clone(), tgt.clone()) { + return true; + } + + // Add to seen cache + seen.insert((j.clone(), m.clone())); + + // Suppose h: j' β†’ j in J. Then (j', D(h) β‹… m ) is connected via a + // zig zag to (j, m). + for jh in jg.in_edges(&j) { + let new_j: QualifiedName = jg.src(&jh); + let h = f.mor_generator_map().get(&jh).unwrap().clone(); + let new_m = h.concat_in(g, m.clone()).unwrap(); + let new_tup = (new_j, new_m); + if !seen.contains(&new_tup) { + worklist.push(new_tup); + } + } + // Suppose h: j β†’ j' in J. 
Then c ← j β†’ D(j') can be completed to a + // commuting triangle in many ways: we search for all morphisms + // D(j') β†’ c that make the triangle commute. + for jh in jg.out_edges(&j) { + let new_j = jg.tgt(&jh); + + let h = f.mor_generator_map().get(&jh).unwrap(); + for new_m in bounded_simple_paths(g, &h.tgt(g), &c, len) { + // check that the triangle commutes + if model.morphisms_are_equal( + m.clone(), + h.clone().concat_in(g, new_m.clone()).unwrap(), + ) { + let new_tup = (new_j.clone(), new_m); + if !seen.contains(&new_tup) { + worklist.push(new_tup); + } + } + } + } + } + false // no zigzag (given `len`-limited exploration of π’ž/c) exists + } +} + +impl<'a> InstanceMorphism<'a> { + /** Validates that the instance morphism is well-defined. + + Assumes that the dom/codom diagrams are valid. If not, this may panic. + */ + pub fn validate_in( + &self, + model: &DiscreteDblModel, + ) -> Result<(), NonEmpty> { + validate::wrap_errors(self.iter_invalid_in(model)) + } + + /// Iterates over failures of the diagram to be valid in the given model. 
+ pub fn iter_invalid_in( + &'a self, + model: &DiscreteDblModel, + ) -> impl Iterator + 'a { + let mut errs = vec![]; + if !(self.1.validate_in(model).is_ok() && self.2.validate_in(model).is_ok()) { + panic!("Invalid domain or codomain") + } + + let cat_j = &self.1.1; + let d = &self.1.0.0.ob_generator_map; + let d_ = &self.2.0.0.ob_generator_map; + + for j in cat_j.ob_generators() { + let Some((j_, f)) = self.0.get(&j) else { + errs.push(InvalidInstanceMorphism::MissingComponent(j)); + continue; + }; + + let Some(dj) = d.get(&j) else { + errs.push(InvalidInstanceMorphism::IllSpecifiedComponent( + format!("component {j:?} wrong "), + j, + )); + continue; + }; + + if model.cod(f) != *dj { + errs.push(InvalidInstanceMorphism::IllSpecifiedComponent( + format!("cod({f:?}) != {dj:?} in C"), + j.clone(), + )); + continue; + }; + let Some(d_j_) = d_.get(j_) else { + errs.push(InvalidInstanceMorphism::IllSpecifiedComponent( + format!("ob {j:?} not found in J"), + j.clone(), + )); + continue; + }; + if model.dom(f) != *d_j_ { + errs.push(InvalidInstanceMorphism::IllSpecifiedComponent( + format!("dom({f:?}) != {d_j_:?} in C"), + j.clone(), + )) + } + } + if errs.is_empty() { + // For all h: j1 β†’ j2 + // + // D(h) + // D(j1) ---> D(j2) + // ^ ^ + // | Ξ±(j1) | Ξ±(j2) + // j1' j2' + // + // Naturality consists of a zigzag in J' between + // (j1', Ξ±(j1) β‹… D(h)) and (j2', Ξ±(j2)) + for jh in cat_j.mor_generators() { + let h = + self.1.0.0.functor_into(model).mor_generator_map().get(&jh).unwrap().clone(); + let j1 = self.1.1.get_dom(&jh).unwrap(); + let j2 = self.1.1.get_cod(&jh).unwrap(); + let (j_1, dom_comp) = self.0.get(j1).unwrap(); + let (j_2, cod_comp) = self.0.get(j2).unwrap(); + let comp_h = + dom_comp.clone().concat_in(model.generating_graph(), h.clone()).unwrap(); + if !self.2.zigzag(j_1.clone(), j_2.clone(), comp_h, cod_comp.clone(), model, None) { + errs.push(InvalidInstanceMorphism::UnnaturalComponent(jh)) + } + } + } + errs.into_iter() + } + + /// Determine 
if two presentations of instance morphisms present the same + /// instance morphism. + /// Note this is still too strict: we require domain and codomain categories + /// of the diagrams to be *identical*, even though they should be considered + /// equal up to some sort of equivalence. + /// This is testing whether, for fixed D and D', whether the assignments + /// given by the underlying mapping of instance morphism produce equal + /// instance morphisms. + /// Presumes that both inputs have been validated - may give incorrect + /// results or panic otherwise. + pub fn equiv(&self, other: &InstanceMorphism, model: &DiscreteDblModel) -> bool { + // No eq method yet. + if self.1 != other.1 { + panic!("Mismatched domain diagram"); + } else if self.2 != other.2 { + panic!("Mismatched codomain diagram") + } + self.0.clone().into_iter().all(|(k, (j1, src))| { + let (j2, tgt) = other.0.get(&k).unwrap(); + self.2.zigzag(j1, j2.clone(), src.clone(), tgt.clone(), model, None) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::dbl::discrete::model::DiscreteDblModel; + use crate::dbl::discrete::model_morphism::DiscreteDblModelMapping; + use crate::one::{Path, PathEq}; + use crate::tt::{ + batch::PARSE_CONFIG, + modelgen::generate, + text_elab::Elaborator, + toplevel::{Theory, Toplevel, std_theories}, + }; + use crate::validate::Validate; + use std::rc::Rc; + use tattle::Reporter; + + use crate::stdlib::*; + use crate::zero::name; + + /// Make a model of the trivial double theory from a string + fn mk_model(s: &str) -> DiscreteDblModel { + let th_ = Rc::new(th_category()); + let th = Theory::new("".into(), th_.clone()); + + let reporter = Reporter::new(); + let toplevel = Toplevel::new(std_theories()); + + PARSE_CONFIG + .with_parsed(s, reporter.clone(), |n| { + let mut elaborator = Elaborator::new(th.clone(), reporter.clone(), &toplevel); + let (_, ty_v) = elaborator.ty(n)?; + let (model, _) = generate(&toplevel, &th, &ty_v); + Some(model) + }) + 
.unwrap() + } + + // First we define two diagrams D and D' in the following category, C, + // which consists in their (disjoint) images, plus two morphisms + // connecting them. + // + /// D(J) D'(J') + /// --- ---- + /// 02 <- 12 + /// | | + /// | βœ“ v + /// | 11 + /// | / ^ + /// v v βœ“ | + /// 00 <- 10 + /// + /// Returns a tuple: (C,D,D') + fn create_two_diagrams( + commutes: bool, + ) -> ( + DiscreteDblModel, + DblModelDiagram, + DblModelDiagram, + ) { + let mut c = mk_model( + "[ + o00 : Object, o02 : Object, o10: Object, o11: Object, o12: Object, + m02_00 : (Id Object)[o02, o00], m10_00 : (Id Object)[o10, o00], + m11_00 : (Id Object)[o11, o00], m10_11 : (Id Object)[o10, o11], + m12_11 : (Id Object)[o12, o11], m12_02 : (Id Object)[o12, o02] + ]", + ); + + c.add_equation(PathEq::new( + name("m10_00").into(), + Path::pair(name("m10_11"), name("m11_00")), + )); + if commutes { + c.add_equation(PathEq::new( + Path::pair(name("m12_02"), name("m02_00")), + Path::pair(name("m12_11"), name("m11_00")), + )); + } + assert!(!c.is_free()); + assert!(c.validate().is_ok()); + + // J presents an instance on C with one 02 and one 00. 
+ let j = mk_model( + "[ + j00 : Object, j02 : Object, j02_00 : (Id Object)[j02, j00] + ]", + ); + + let mut dm: DiscreteDblModelMapping = Default::default(); + dm.assign_ob(name("j00"), name("o00")); + dm.assign_ob(name("j02"), name("o02")); + dm.assign_mor(name("j02_00"), name("m02_00").into()); + let d = DblModelDiagram(dm, j); + + assert!(d.validate_in(&c).is_ok()); + + // J' presents the terminal instance on C + let j_ = mk_model( + "[ + j_10 : Object, j_11 : Object, j_12: Object, + j_10_11 : (Id Object)[j_10, j_11], j_12_11 : (Id Object)[j_12, j_11] + ]", + ); + + let mut dm_: DiscreteDblModelMapping = Default::default(); + dm_.assign_ob(name("j_10"), name("o10")); + dm_.assign_ob(name("j_11"), name("o11")); + dm_.assign_ob(name("j_12"), name("o12")); + dm_.assign_mor(name("j_10_11"), name("m10_11").into()); + dm_.assign_mor(name("j_12_11"), name("m12_11").into()); + let d_ = DblModelDiagram(dm_, j_); + + assert!(d_.validate_in(&c).is_ok()); + (c, d, d_) + } + + #[test] + fn test_zigzag() { + let (c, d, d_) = create_two_diagrams(true); + + assert!(d.zigzag( + name("j02"), + name("j00"), + name("m02_00").into(), + QualifiedPath::empty(name("o00")), + &c, + None + )); + + assert!(d.zigzag( + name("j00"), + name("j02"), + QualifiedPath::empty(name("o00")), + name("m02_00").into(), + &c, + None + )); + + assert!(d_.zigzag( + name("j_12"), + name("j_10"), + QualifiedPath::pair(name("m12_02"), name("m02_00")), + name("m10_00").into(), + &c, + None + )); + + // If one square of does not commute - there is no zig zag anymore. 
+ let (c, _, d_) = create_two_diagrams(false); + assert!(!d_.zigzag( + name("j_12"), + name("j_10"), + QualifiedPath::pair(name("m12_02"), name("m02_00")), + name("m10_00").into(), + &c, + None + )); + } + + #[test] + fn test_instance_morphism_validation() { + let (c, d, d_) = create_two_diagrams(true); + + let m: HashColumn = HashColumn::new( + [ + (name("j02"), (name("j_12"), Path::single(name("m12_02")))), + (name("j00"), (name("j_10"), Path::single(name("m10_00")))), + ] + .into(), + ); + let im = InstanceMorphism(&m, &d, &d_); + assert!(im.validate_in(&c).is_ok()); + + // Try the same thing without the square commuting + let (cbad, dbad, d_bad) = create_two_diagrams(false); + + let m: HashColumn = HashColumn::new( + [ + (name("j02"), (name("j_12"), Path::single(name("m12_02")))), + (name("j00"), (name("j_10"), Path::single(name("m10_00")))), + ] + .into(), + ); + let imbad = InstanceMorphism(&m, &dbad, &d_bad); + assert!( + imbad.validate_in(&cbad) + == Err(NonEmpty::new(InvalidInstanceMorphism::UnnaturalComponent( + name("j02_00").into() + ))) + ); + + // Test for various errors + // ----------------------- + // Missing a component + let m: HashColumn = + HashColumn::new([(name("j02"), (name("j_12"), Path::single(name("m12_02"))))].into()); + let im = InstanceMorphism(&m, &d, &d_); + assert!(im.validate_in(&c).is_err()); + + // Unnatural (bad target) + let m: HashColumn = HashColumn::new( + [ + (name("j02"), (name("j_12"), Path::single(name("m12_11")))), + (name("j00"), (name("j_10"), Path::single(name("m10_00")))), + ] + .into(), + ); + let im = InstanceMorphism(&m, &d, &d_); + assert!(im.validate_in(&c).is_err()); + + // Unnatural (bad src) + let m: HashColumn = HashColumn::new( + [ + (name("j02"), (name("j_12"), Path::single(name("m12_11")))), + (name("j00"), (name("j_10"), Path::single(name("m11_00")))), + ] + .into(), + ); + let im = InstanceMorphism(&m, &d, &d_); + assert!(im.validate_in(&c).is_err()); + } + #[test] + fn 
test_instance_morphism_equivalence1() { + let (c, d, d_) = create_two_diagrams(true); + + let m: HashColumn = HashColumn::new( + [ + (name("j02"), (name("j_12"), Path::single(name("m12_02")))), + (name("j00"), (name("j_10"), Path::single(name("m10_00")))), + ] + .into(), + ); + let im = InstanceMorphism(&m, &d, &d_); + assert!(im.validate_in(&c).is_ok()); + + let m2: HashColumn = HashColumn::new( + [ + (name("j02"), (name("j_12"), Path::single(name("m12_02")))), + (name("j00"), (name("j_12"), Path::pair(name("m12_11"), name("m11_00")))), + ] + .into(), + ); + let im2 = InstanceMorphism(&m2, &d, &d_); + + assert!(im2.validate_in(&c).is_ok()); + + assert!(im.equiv(&im2, &c)); + } +} diff --git a/packages/catlog/src/tt/batch.rs b/packages/catlog/src/tt/batch.rs index 38ba2d85a..20896c690 100644 --- a/packages/catlog/src/tt/batch.rs +++ b/packages/catlog/src/tt/batch.rs @@ -16,7 +16,8 @@ use tattle::{Reporter, declare_error}; use text_elab::*; use toplevel::*; -const PARSE_CONFIG: ParseConfig = ParseConfig::new( +/// Parser configuration +pub const PARSE_CONFIG: ParseConfig = ParseConfig::new( &[ (":", Prec::nonassoc(20)), (":=", Prec::nonassoc(10)), diff --git a/packages/catlog/src/tt/notebook_elab.rs b/packages/catlog/src/tt/notebook_elab.rs index c1059ef90..9c347c1d9 100644 --- a/packages/catlog/src/tt/notebook_elab.rs +++ b/packages/catlog/src/tt/notebook_elab.rs @@ -251,6 +251,7 @@ impl<'a> Elaborator<'a> { let (name, label, _, ty_v) = match cell { ModelJudgment::Object(ob_decl) => self.object_cell(ob_decl), ModelJudgment::Morphism(mor_decl) => self.morphism_cell(mor_decl), + ModelJudgment::Instantiation(_) => todo!(), }?; field_ty0s.push((name, (label, ty_v.ty0()))); field_ty_vs.push((name, (label, ty_v.clone()))); diff --git a/packages/catlog/src/tt/text_elab.rs b/packages/catlog/src/tt/text_elab.rs index 23abe556f..4fa876759 100644 --- a/packages/catlog/src/tt/text_elab.rs +++ b/packages/catlog/src/tt/text_elab.rs @@ -220,7 +220,8 @@ impl TopElaborator { } } 
-struct Elaborator<'a> { +/// Elaborator +pub struct Elaborator<'a> { theory: Theory, reporter: Reporter, toplevel: &'a Toplevel, @@ -236,7 +237,8 @@ struct ElaboratorCheckpoint { declare_error!(ELAB_ERROR, "elab", "an error during elaboration"); impl<'a> Elaborator<'a> { - fn new(theory: Theory, reporter: Reporter, toplevel: &'a Toplevel) -> Self { + /// Create a new elaborator + pub fn new(theory: Theory, reporter: Reporter, toplevel: &'a Toplevel) -> Self { Self { theory, reporter, @@ -388,7 +390,8 @@ impl<'a> Elaborator<'a> { } } - fn ty(&mut self, n: &FNtn) -> Option<(TyS, TyV)> { + /// DOCSTRING + pub fn ty(&mut self, n: &FNtn) -> Option<(TyS, TyV)> { let mut elab = self.enter(n.loc()); match n.ast0() { Var(name) => elab.lookup_ty(name_seg(*name)), diff --git a/packages/frontend/package.json b/packages/frontend/package.json index fa4f47c91..8f13367ae 100644 --- a/packages/frontend/package.json +++ b/packages/frontend/package.json @@ -16,7 +16,8 @@ "lint": "biome lint --write && biome check --write", "ci": "biome ci", "doc": "typedoc --entryPointStrategy expand ./src", - "test": "vitest --mode development" + "test": "vitest --mode development", + "test:no-backend": "vitest --mode development --exclude 'src/api/*'" }, "dependencies": { "@atlaskit/pragmatic-drag-and-drop": "^1.3.0", @@ -39,7 +40,8 @@ "@solid-primitives/active-element": "^2.1.1", "@solid-primitives/context": "^0.3.1", "@solid-primitives/destructure": "^0.2.1", - "@solid-primitives/keyboard": "^1.3.1", + "@solid-primitives/event-listener": "^2.4.3", + "@solid-primitives/map": "^0.7.2", "@solid-primitives/timer": "^1.4.1", "@solidjs/router": "^0.15.3", "@viz-js/viz": "^3.15.0", diff --git a/packages/frontend/pnpm-lock.yaml b/packages/frontend/pnpm-lock.yaml index e8ac6bdc0..02d3a004b 100644 --- a/packages/frontend/pnpm-lock.yaml +++ b/packages/frontend/pnpm-lock.yaml @@ -68,9 +68,12 @@ importers: '@solid-primitives/destructure': specifier: ^0.2.1 version: 0.2.2(solid-js@1.9.7) - 
'@solid-primitives/keyboard': - specifier: ^1.3.1 - version: 1.3.3(solid-js@1.9.7) + '@solid-primitives/event-listener': + specifier: ^2.4.3 + version: 2.4.3(solid-js@1.9.7) + '@solid-primitives/map': + specifier: ^0.7.2 + version: 0.7.2(solid-js@1.9.7) '@solid-primitives/timer': specifier: ^1.4.1 version: 1.4.2(solid-js@1.9.7) @@ -1074,21 +1077,11 @@ packages: peerDependencies: solid-js: ^1.6.12 - '@solid-primitives/event-listener@2.3.3': - resolution: {integrity: sha512-DAJbl+F0wrFW2xmcV8dKMBhk9QLVLuBSW+TR4JmIfTaObxd13PuL7nqaXnaYKDWOYa6otB00qcCUIGbuIhSUgQ==} - peerDependencies: - solid-js: ^1.6.12 - '@solid-primitives/event-listener@2.4.3': resolution: {integrity: sha512-h4VqkYFv6Gf+L7SQj+Y6puigL/5DIi7x5q07VZET7AWcS+9/G3WfIE9WheniHWJs51OEkRB43w6lDys5YeFceg==} peerDependencies: solid-js: ^1.6.12 - '@solid-primitives/keyboard@1.3.3': - resolution: {integrity: sha512-9dQHTTgLBqyAI7aavtO+HnpTVJgWQA1ghBSrmLtMu1SMxLPDuLfuNr+Tk5udb4AL4Ojg7h9JrKOGEEDqsJXWJA==} - peerDependencies: - solid-js: ^1.6.12 - '@solid-primitives/keyed@1.2.2': resolution: {integrity: sha512-oBziY40JK4XmJ57XGkFl8j0GtEarSu0hhjdkUQgqL/U0QQE3TZrRo9uhgH7I6VGJKBKG7SAraTPE6S5lVLM1ow==} peerDependencies: @@ -1099,6 +1092,11 @@ packages: peerDependencies: solid-js: ^1.6.12 + '@solid-primitives/map@0.7.2': + resolution: {integrity: sha512-sXK/rS68B4oq3XXNyLrzVhLtT1pnimmMUahd2FqhtYUuyQsCfnW058ptO1s+lWc2k8F/3zQSNVkZ2ifJjlcNbQ==} + peerDependencies: + solid-js: ^1.6.12 + '@solid-primitives/media@2.2.9': resolution: {integrity: sha512-QUmU62D4/d9YWx/4Dvr/UZasIkIpqNXz7wosA5GLmesRW9XlPa3G5M6uOmTw73SByHNTCw0D6x8bSdtvvLgzvQ==} peerDependencies: @@ -1144,6 +1142,11 @@ packages: peerDependencies: solid-js: ^1.6.12 + '@solid-primitives/trigger@1.2.2': + resolution: {integrity: sha512-IWoptVc0SWYgmpBPpCMehS5b07+tpFcvw15tOQ3QbXedSYn6KP8zCjPkHNzMxcOvOicTneleeZDP7lqmz+PQ6g==} + peerDependencies: + solid-js: ^1.6.12 + '@solid-primitives/utils@6.2.3': resolution: {integrity: 
sha512-CqAwKb2T5Vi72+rhebSsqNZ9o67buYRdEJrIFzRXz3U59QqezuuxPsyzTSVCacwS5Pf109VRsgCJQoxKRoECZQ==} peerDependencies: @@ -4129,23 +4132,11 @@ snapshots: '@solid-primitives/utils': 6.3.2(solid-js@1.9.7) solid-js: 1.9.7 - '@solid-primitives/event-listener@2.3.3(solid-js@1.9.7)': - dependencies: - '@solid-primitives/utils': 6.2.3(solid-js@1.9.7) - solid-js: 1.9.7 - '@solid-primitives/event-listener@2.4.3(solid-js@1.9.7)': dependencies: '@solid-primitives/utils': 6.3.2(solid-js@1.9.7) solid-js: 1.9.7 - '@solid-primitives/keyboard@1.3.3(solid-js@1.9.7)': - dependencies: - '@solid-primitives/event-listener': 2.4.3(solid-js@1.9.7) - '@solid-primitives/rootless': 1.5.2(solid-js@1.9.7) - '@solid-primitives/utils': 6.3.2(solid-js@1.9.7) - solid-js: 1.9.7 - '@solid-primitives/keyed@1.2.2(solid-js@1.9.7)': dependencies: solid-js: 1.9.7 @@ -4155,6 +4146,11 @@ snapshots: '@solid-primitives/trigger': 1.1.0(solid-js@1.9.7) solid-js: 1.9.7 + '@solid-primitives/map@0.7.2(solid-js@1.9.7)': + dependencies: + '@solid-primitives/trigger': 1.2.2(solid-js@1.9.7) + solid-js: 1.9.7 + '@solid-primitives/media@2.2.9(solid-js@1.9.7)': dependencies: '@solid-primitives/event-listener': 2.4.3(solid-js@1.9.7) @@ -4175,7 +4171,7 @@ snapshots: '@solid-primitives/resize-observer@2.0.26(solid-js@1.9.7)': dependencies: - '@solid-primitives/event-listener': 2.3.3(solid-js@1.9.7) + '@solid-primitives/event-listener': 2.4.3(solid-js@1.9.7) '@solid-primitives/rootless': 1.4.5(solid-js@1.9.7) '@solid-primitives/static-store': 0.0.8(solid-js@1.9.7) '@solid-primitives/utils': 6.2.3(solid-js@1.9.7) @@ -4205,6 +4201,11 @@ snapshots: '@solid-primitives/utils': 6.3.2(solid-js@1.9.7) solid-js: 1.9.7 + '@solid-primitives/trigger@1.2.2(solid-js@1.9.7)': + dependencies: + '@solid-primitives/utils': 6.3.2(solid-js@1.9.7) + solid-js: 1.9.7 + '@solid-primitives/utils@6.2.3(solid-js@1.9.7)': dependencies: solid-js: 1.9.7 diff --git a/packages/frontend/src/analysis/analysis_editor.tsx 
b/packages/frontend/src/analysis/analysis_editor.tsx index 268badb1d..82e7f894d 100644 --- a/packages/frontend/src/analysis/analysis_editor.tsx +++ b/packages/frontend/src/analysis/analysis_editor.tsx @@ -16,6 +16,7 @@ import { useApi } from "../api"; import { IconButton, ResizableHandle } from "../components"; import { DiagramPane } from "../diagram/diagram_editor"; import { DiagramMenu } from "../diagram/diagram_menu"; +import { createModelLibraryWithApi } from "../model"; import { ModelPane } from "../model/model_editor"; import { ModelMenu } from "../model/model_menu"; import { @@ -40,15 +41,16 @@ import PanelRight from "lucide-solid/icons/panel-right"; import PanelRightClose from "lucide-solid/icons/panel-right-close"; export default function AnalysisPage() { + const params = useParams(); + const api = useApi(); const theories = useContext(TheoryLibraryContext); invariant(theories, "Must provide theory library as context to analysis page"); - - const params = useParams(); + const models = createModelLibraryWithApi(api, theories); const [liveAnalysis] = createResource( () => params.ref, - (refId) => getLiveAnalysis(refId, api, theories), + (refId) => getLiveAnalysis(refId, api, models), ); return ( diff --git a/packages/frontend/src/analysis/document.ts b/packages/frontend/src/analysis/document.ts index a9bd7b395..7a441e9c5 100644 --- a/packages/frontend/src/analysis/document.ts +++ b/packages/frontend/src/analysis/document.ts @@ -1,17 +1,17 @@ -import type { AutomergeUrl, Repo } from "@automerge/automerge-repo"; +import type { AnyDocumentId, Repo } from "@automerge/automerge-repo"; import { type Analysis, type AnalysisType, type Document, type StableRef, + type Uuid, currentVersion, } from "catlog-wasm"; -import { type Api, type LiveDoc, getLiveDocFromDocHandle } from "../api"; +import { type Api, type LiveDoc, findAndMigrate, makeLiveDoc } from "../api"; import { type LiveDiagramDocument, getLiveDiagram, getLiveDiagramFromRepo } from "../diagram"; -import { 
type LiveModelDocument, getLiveModel, getLiveModelFromRepo } from "../model"; +import type { LiveModelDocument, ModelLibrary } from "../model"; import { newNotebook } from "../notebook"; -import type { TheoryLibrary } from "../theory"; /** A document defining an analysis. */ export type AnalysisDocument = Document & { type: "analysis" }; @@ -79,16 +79,16 @@ export async function createAnalysis(api: Api, analysisType: AnalysisType, analy /** Retrieve an analysis and make it "live" for editing. */ export async function getLiveAnalysis( - refId: string, + refId: Uuid, api: Api, - theories: TheoryLibrary, + models: ModelLibrary, ): Promise { const liveDoc = await api.getLiveDoc(refId, "analysis"); const { doc } = liveDoc; // XXX: TypeScript cannot narrow types in nested tagged unions. if (doc.analysisType === "model") { - const liveModel = await getLiveModel(doc.analysisOf._id, api, theories); + const liveModel = await models.getLiveModel(doc.analysisOf._id); return { type: "analysis", analysisType: "model", @@ -96,7 +96,7 @@ export async function getLiveAnalysis( liveModel, }; } else if (doc.analysisType === "diagram") { - const liveDiagram = await getLiveDiagram(doc.analysisOf._id, api, theories); + const liveDiagram = await getLiveDiagram(doc.analysisOf._id, api, models); return { type: "analysis", analysisType: "diagram", @@ -112,17 +112,17 @@ export async function getLiveAnalysis( Prefer [`getLiveAnalysis`] unless you're bypassing the official backend. 
*/ export async function getLiveAnalysisFromRepo( - docId: AutomergeUrl, + docId: AnyDocumentId, repo: Repo, - theories: TheoryLibrary, + models: ModelLibrary, ): Promise { - const docHandle = await repo.find(docId); - const liveDoc = getLiveDocFromDocHandle(docHandle); + const docHandle = await findAndMigrate(repo, docId, "analysis"); + const liveDoc = makeLiveDoc(docHandle); const { doc } = liveDoc; - const parentId = doc.analysisOf._id as AutomergeUrl; + const parentId = doc.analysisOf._id as AnyDocumentId; if (doc.analysisType === "model") { - const liveModel = await getLiveModelFromRepo(parentId, repo, theories); + const liveModel = await models.getLiveModel(parentId); return { type: "analysis", analysisType: "model", @@ -130,7 +130,7 @@ export async function getLiveAnalysisFromRepo( liveModel, }; } else if (doc.analysisType === "diagram") { - const liveDiagram = await getLiveDiagramFromRepo(parentId, repo, theories); + const liveDiagram = await getLiveDiagramFromRepo(parentId, repo, models); return { type: "analysis", analysisType: "diagram", diff --git a/packages/frontend/src/api/document.ts b/packages/frontend/src/api/document.ts index 7e0e58eeb..f1f5a9791 100644 --- a/packages/frontend/src/api/document.ts +++ b/packages/frontend/src/api/document.ts @@ -1,4 +1,10 @@ -import type { ChangeFn, DocHandle, DocHandleChangePayload } from "@automerge/automerge-repo"; +import type { + AnyDocumentId, + ChangeFn, + DocHandle, + DocHandleChangePayload, + Repo, +} from "@automerge/automerge-repo"; import jsonpatch from "fast-json-patch"; import { type Accessor, createEffect, createSignal } from "solid-js"; import { createStore, reconcile } from "solid-js/store"; @@ -36,7 +42,7 @@ export type LiveDoc = { }; /** Info about a document ref in the CatColab backend. */ -type DocRef = { +export type DocRef = { /** ID of the document ref. 
*/ refId: string; @@ -44,18 +50,18 @@ type DocRef = { permissions: Permissions; }; -/** Create a live document from an Automerge document handle. +/** Gets a document from an Automerge repo, migrating it if necessary. -When using the official CatColab backend, this function should be called only -indirectly, via [`getLiveDoc`]. However, if you want to bypass the CatColab -backend and fetch a document from another Automerge repo, you can call this -function directly. +Prefer calling this function over calling `Repo.find` directly to ensure that +any necessary migrations are performed before the data is accessed. */ -export function getLiveDocFromDocHandle( - docHandle: DocHandle, +export async function findAndMigrate( + repo: Repo, + docId: AnyDocumentId, docType?: Doc["type"], - docRef?: DocRef, -): LiveDoc { +): Promise> { + const docHandle = await repo.find(docId); + // Perform any migrations on the document. // XXX: copied from automerge-doc-server/src/server.ts: const docBefore = docHandle.doc(); @@ -67,16 +73,29 @@ export function getLiveDocFromDocHandle( }); } - const doc = makeDocHandleReactive(docHandle); if (docType !== undefined) { + const actualType = docHandle.doc().type; invariant( - doc.type === docType, - () => `Expected document of type ${docType}, got ${doc.type}`, + actualType === docType, + () => `Expected document of type ${docType}, got ${actualType}`, ); } + return docHandle; +} - const changeDoc = (f: ChangeFn) => docHandle.change(f); +/** Create a live document from an Automerge document handle. +When using the official CatColab backend, this function should be called only +indirectly, via [`getLiveDoc`]. However, if you want to bypass the CatColab +backend and fetch a document from another Automerge repo, you can call this +function directly. 
+ */ +export function makeLiveDoc( + docHandle: DocHandle, + docRef?: DocRef, +): LiveDoc { + const doc = makeDocHandleReactive(docHandle); + const changeDoc = (f: ChangeFn) => docHandle.change(f); return { doc, changeDoc, docHandle, docRef }; } diff --git a/packages/frontend/src/api/document_rpc.test.ts b/packages/frontend/src/api/document_rpc.test.ts index 0ac5afe44..3bae8afa0 100644 --- a/packages/frontend/src/api/document_rpc.test.ts +++ b/packages/frontend/src/api/document_rpc.test.ts @@ -5,7 +5,8 @@ import { deleteUser, getAuth, signOut } from "firebase/auth"; import * as uuid from "uuid"; import { assert, afterAll, describe, test } from "vitest"; -import { initTestUserAuth } from "../util/test_util.ts"; +import type { Document } from "catlog-wasm"; +import { createTestDocument, initTestUserAuth } from "../util/test_util.ts"; import { createRpcClient, unwrap, unwrapErr } from "./rpc.ts"; const serverUrl = import.meta.env.VITE_SERVER_URL; @@ -23,10 +24,7 @@ const repo = new Repo({ //afterAll(() => repo.shutdown()); describe("RPC for Automerge documents", async () => { - const content = { - type: "model", - name: "My model", - }; + const content = createTestDocument("My model"); const refId = unwrap(await rpc.new_ref.mutate(content)); test.sequential("should get a valid ref UUID", () => { assert(uuid.validate(refId)); @@ -54,14 +52,24 @@ describe("RPC for Automerge documents", async () => { assert(result.tag === "Err" && result.code === 404); }); + test("should reject invalid documents", async () => { + const invalidDoc = { + type: "model", + name: "Invalid model", + }; + const invalidResult = await rpc.new_ref.mutate(invalidDoc); + assert(invalidResult.tag === "Err"); + assert.strictEqual(invalidResult.code, 400); + }); + if (!isValidDocumentId(refDoc.docId)) { return; } - const docHandle = (await repo.find(refDoc.docId)) as DocHandle; + const docHandle = (await repo.find(refDoc.docId)) as DocHandle; const doc = docHandle.doc(); test.sequential("should get the 
original document data", () => { - assert.deepStrictEqual(doc, content); + assert.deepStrictEqual(doc, content as unknown as Document); }); const newName = "Renamed model"; @@ -70,7 +78,7 @@ describe("RPC for Automerge documents", async () => { }); test.sequential("should autosave to the database", { timeout: 1000, retry: 5 }, async () => { - const newContent = unwrap(await rpc.head_snapshot.query(refId)) as typeof content; + const newContent = unwrap(await rpc.head_snapshot.query(refId)) as unknown as Document; assert.strictEqual(newContent.name, newName); }); }); @@ -86,10 +94,7 @@ describe("Authorized RPC", async () => { unwrap(await rpc.sign_up_or_sign_in.mutate()); - const content = { - type: "model", - name: "My private model", - }; + const content = createTestDocument("My private model"); const privateId = unwrap(await rpc.new_ref.mutate(content)); test.sequential("should get a valid ref UUID when authenticated", () => { assert(uuid.validate(privateId)); @@ -111,12 +116,7 @@ describe("Authorized RPC", async () => { }); }); - const readonlyId = unwrap( - await rpc.new_ref.mutate({ - type: "model", - name: "My readonly model", - }), - ); + const readonlyId = unwrap(await rpc.new_ref.mutate(createTestDocument("My readonly model"))); unwrap( await rpc.set_permissions.mutate(readonlyId, { anyone: "Read", diff --git a/packages/frontend/src/api/types.ts b/packages/frontend/src/api/types.ts index 4a89e0759..29ccfab9c 100644 --- a/packages/frontend/src/api/types.ts +++ b/packages/frontend/src/api/types.ts @@ -1,4 +1,4 @@ -import { type DocumentId, Repo } from "@automerge/automerge-repo"; +import { type DocHandle, type DocumentId, Repo } from "@automerge/automerge-repo"; import { BrowserWebSocketClientAdapter } from "@automerge/automerge-repo-network-websocket"; import { IndexedDBStorageAdapter } from "@automerge/automerge-repo-storage-indexeddb"; import type { FirebaseApp } from "firebase/app"; @@ -8,7 +8,7 @@ import * as uuid from "uuid"; import type { Permissions } 
from "catcolab-api"; import type { Document, StableRef, Uuid } from "catlog-wasm"; import type { InterfaceToType } from "../util/types"; -import { type LiveDoc, getLiveDocFromDocHandle } from "./document"; +import { type LiveDoc, findAndMigrate, makeLiveDoc } from "./document"; import { type RpcClient, createRpcClient } from "./rpc"; /** Bundle of everything needed to interact with the CatColab backend. */ @@ -65,20 +65,28 @@ export class Api { refId: Uuid, docType?: Doc["type"], ): Promise> { - const { docId, permissions, localOnly } = await this.getDocCacheEntry(refId); - const repo = localOnly ? this.localRepo : this.repo; - const docHandle = await repo.find(docId); - - return getLiveDocFromDocHandle(docHandle, docType, { + const docHandle = await this.getDocHandle(refId, docType); + const permissions = await this.getPermissions(refId); + return makeLiveDoc(docHandle, { refId, permissions, }); } - /** Get an Automerge document ID for the given document ref. */ - async getDocId(refId: Uuid): Promise { - const { docId } = await this.getDocCacheEntry(refId); - return docId; + /** Gets an Automerge document handle for the given document ref. */ + async getDocHandle( + refId: Uuid, + docType?: Doc["type"], + ): Promise> { + const { docId, localOnly } = await this.getDocCacheEntry(refId); + const repo = localOnly ? this.localRepo : this.repo; + return await findAndMigrate(repo, docId, docType); + } + + /** Get permissions for the given document ref. 
*/ + async getPermissions(refId: Uuid): Promise { + const { permissions } = await this.getDocCacheEntry(refId); + return permissions; } private async getDocCacheEntry(refId: Uuid): Promise { diff --git a/packages/frontend/src/api/user_rpc.test.ts b/packages/frontend/src/api/user_rpc.test.ts index bfd076069..26d6c4247 100644 --- a/packages/frontend/src/api/user_rpc.test.ts +++ b/packages/frontend/src/api/user_rpc.test.ts @@ -5,7 +5,7 @@ import { v4 } from "uuid"; import { assert, afterAll, describe, test } from "vitest"; import type { UserProfile } from "catcolab-api"; -import { initTestUserAuth } from "../util/test_util.ts"; +import { createTestDocument, initTestUserAuth } from "../util/test_util.ts"; import { createRpcClient, unwrap, unwrapErr } from "./rpc.ts"; const serverUrl = import.meta.env.VITE_SERVER_URL; @@ -113,12 +113,7 @@ describe("Sharing documents between users", async () => { unwrap(await rpc.sign_up_or_sign_in.mutate()); // Create the document to be shared. - const refId = unwrap( - await rpc.new_ref.mutate({ - type: "model", - name: "My shared model", - }), - ); + const refId = unwrap(await rpc.new_ref.mutate(createTestDocument("My shared model"))); // Share the document with read-only permissions. unwrap( diff --git a/packages/frontend/src/components/completions.tsx b/packages/frontend/src/components/completions.tsx index 09849fada..0f5b89bfe 100644 --- a/packages/frontend/src/components/completions.tsx +++ b/packages/frontend/src/components/completions.tsx @@ -1,6 +1,7 @@ -import type { KbdKey } from "@solid-primitives/keyboard"; import { For, type JSX, Show, createMemo, createSignal, onMount } from "solid-js"; +import type { KbdKey } from "../util/keyboard"; + import "./completions.css"; /** A possible completion. */ @@ -96,7 +97,7 @@ export function Completions(props: {
{c.name}
- +
diff --git a/packages/frontend/src/diagram/diagram_editor.tsx b/packages/frontend/src/diagram/diagram_editor.tsx index 73b72794b..7d587de86 100644 --- a/packages/frontend/src/diagram/diagram_editor.tsx +++ b/packages/frontend/src/diagram/diagram_editor.tsx @@ -6,12 +6,11 @@ import invariant from "tiny-invariant"; import type { DiagramJudgment } from "catlog-wasm"; import { useApi } from "../api"; import { InlineInput } from "../components"; -import { LiveModelContext } from "../model"; +import { LiveModelContext, createModelLibraryWithApi } from "../model"; import { type CellConstructor, type FormalCellEditorProps, NotebookEditor, - cellShortcutModifier, newFormalCell, } from "../notebook"; import { DocumentBreadcrumbs, DocumentLoadingScreen, Toolbar } from "../page"; @@ -33,15 +32,16 @@ import { import "./diagram_editor.css"; export default function DiagramPage() { + const params = useParams(); + const api = useApi(); const theories = useContext(TheoryLibraryContext); invariant(theories, "Must provide theory library as context to diagram page"); - - const params = useParams(); + const models = createModelLibraryWithApi(api, theories); const [liveDiagram] = createResource( () => params.ref, - (refId) => getLiveDiagram(refId, api, theories), + (refId) => getLiveDiagram(refId, api, models), ); return ( @@ -179,7 +179,7 @@ function diagramCellConstructor(meta: InstanceTypeMeta): CellConstructor { /** Retrieve a diagram from the backend and make it "live" for editing. 
*/ export async function getLiveDiagram( - refId: string, + refId: Uuid, api: Api, - theories: TheoryLibrary, + models: ModelLibrary, ): Promise { const liveDoc = await api.getLiveDoc(refId, "diagram"); const modelRefId = liveDoc.doc.diagramIn._id; - const liveModel = await getLiveModel(modelRefId, api, theories); + const liveModel = await models.getLiveModel(modelRefId); return enlivenDiagramDocument(liveDoc, liveModel); } @@ -148,14 +148,14 @@ export async function getLiveDiagram( Prefer [`getLiveDiagram`] unless you're bypassing the official backend. */ export async function getLiveDiagramFromRepo( - docId: AutomergeUrl, + docId: AnyDocumentId, repo: Repo, - theories: TheoryLibrary, + models: ModelLibrary, ): Promise { - const docHandle = await repo.find(docId); - const liveDoc = getLiveDocFromDocHandle(docHandle); - const modelDocId = liveDoc.doc.diagramIn._id as AutomergeUrl; + const docHandle = await findAndMigrate(repo, docId, "diagram"); + const liveDoc = makeLiveDoc(docHandle); + const modelDocId = liveDoc.doc.diagramIn._id as AnyDocumentId; - const liveModel = await getLiveModelFromRepo(modelDocId, repo, theories); + const liveModel = await models.getLiveModel(modelDocId); return enlivenDiagramDocument(liveDoc, liveModel); } diff --git a/packages/frontend/src/help/analysis/linear-ode.mdx b/packages/frontend/src/help/analysis/linear-ode.mdx index e2da915bb..fedffd8c2 100644 --- a/packages/frontend/src/help/analysis/linear-ode.mdx +++ b/packages/frontend/src/help/analysis/linear-ode.mdx @@ -1,14 +1,12 @@ -import { A } from "@solidjs/router"; - We might want to consider linear dynamics whenever we have *unidirectional* influence between variables, where variables affect the *first derivative* of other variables, with no feedback a priori. #### User inputs
Initial value: $\mathbb{R}_{\geqslant0}$
-
The initial value of the variable.
+
The initial value of the variable
Coefficient: $\mathbb{R}_{\geqslant0}$
-
The coefficient ($k$) corresponding to the arrow.
+
The coefficient ($k$) corresponding to the arrow
Duration: $\mathbb{R}_{\geqslant0}$
-
The total duration of the simulation in units of time.
+
The total duration of the simulation in units of time
diff --git a/packages/frontend/src/help/analysis/loops.mdx b/packages/frontend/src/help/analysis/loops.mdx index 5fe63104c..648e80a7b 100644 --- a/packages/frontend/src/help/analysis/loops.mdx +++ b/packages/frontend/src/help/analysis/loops.mdx @@ -1,12 +1,10 @@ -import { A } from "@solidjs/router"; - CatColab supports this analysis by searching for morphisms from the causal loop diagram given by a free loop. #### User inputs
Limit length of paths: true | false
-
Whether or not to limit the number of paths composed in order to find loops.
+
Whether or not to limit the number of paths composed in order to find loops
Maximum length of path: $\mathbb{Z}_{\geqslant0}$
-
A non-negative integer determining the maximum number of paths to compose in order to find loops.
+
A non-negative integer determining the maximum number of paths to compose in order to find loops
diff --git a/packages/frontend/src/help/analysis/lotka-volterra.mdx b/packages/frontend/src/help/analysis/lotka-volterra.mdx index b27f713ff..5a53011f1 100644 --- a/packages/frontend/src/help/analysis/lotka-volterra.mdx +++ b/packages/frontend/src/help/analysis/lotka-volterra.mdx @@ -1,16 +1,14 @@ -import { A } from "@solidjs/router"; - We might want to think about Lotka–Volterra dynamics whenever influence between variables is *bidirectional*, e.g. for predator-prey models, where higher population of predators means lower population of prey, which means lower population of predators, and so on. #### User inputs
Initial value: $\mathbb{R}_{\geqslant0}$
-
The initial value of the variable.
+
The initial value of the variable
Interaction: $\mathbb{R}_{\geqslant0}$
-
The interaction coefficient ($k$) corresponding to the arrow.
+
The interaction coefficient ($k$) corresponding to the arrow
Growth/decay: $\mathbb{R}$
-
The growth rate ($g$) of the variable.
+
The growth rate ($g$) of the variable
Duration: $\mathbb{R}_{\geqslant0}$
-
The total duration of the simulation in units of time.
+
The total duration of the simulation in units of time
diff --git a/packages/frontend/src/help/analysis/mass-action.mdx b/packages/frontend/src/help/analysis/mass-action.mdx index f0c684e29..0aa5db8ae 100644 --- a/packages/frontend/src/help/analysis/mass-action.mdx +++ b/packages/frontend/src/help/analysis/mass-action.mdx @@ -1,12 +1,10 @@ -import { A } from "@solidjs/router"; - #### User inputs
Initial value: $\mathbb{R}_{\geqslant0}$
-
The initial value of the stock.
+
The initial value of the concentration
Rate: $\mathbb{R}_{\geqslant0}$
-
The rate coefficient ($r$) of the flow.
+
The rate coefficient ($r$) of the reaction
Duration: $\mathbb{R}_{\geqslant0}$
-
The total duration of the simulation in units of time.
+
The total duration of the simulation in units of time
diff --git a/packages/frontend/src/help/analysis/stochastic-mass-action.mdx b/packages/frontend/src/help/analysis/stochastic-mass-action.mdx new file mode 100644 index 000000000..fcc553875 --- /dev/null +++ b/packages/frontend/src/help/analysis/stochastic-mass-action.mdx @@ -0,0 +1,10 @@ +#### User inputs + +
+
Initial value: $\mathbb{N}$
+
The initial count of the population
+
Rate: $\mathbb{R}_{\geqslant0}$
+
The rate coefficient ($r$) of the reaction
+
Duration: $\mathbb{R}_{\geqslant0}$
+
The total duration of the simulation in units of time
+
diff --git a/packages/frontend/src/help/analysis/subreachability.mdx b/packages/frontend/src/help/analysis/subreachability.mdx new file mode 100644 index 000000000..8730a76d2 --- /dev/null +++ b/packages/frontend/src/help/analysis/subreachability.mdx @@ -0,0 +1,8 @@ +#### User inputs + +
+
Initial tokens: $\mathbb{N}$
+
Some number of tokens for each place, describing the initial tokening
+
Forbidden tokens: $\mathbb{N}$
+
Some number of tokens for each place, describing the forbidden tokening
+
diff --git a/packages/frontend/src/help/analysis/visualization.mdx b/packages/frontend/src/help/analysis/visualization.mdx index 2aa4a7fc7..daa86d128 100644 --- a/packages/frontend/src/help/analysis/visualization.mdx +++ b/packages/frontend/src/help/analysis/visualization.mdx @@ -1,11 +1,9 @@ -import { A } from "@solidjs/router"; - #### User inputs
Layout: `Directed | Undirected`
Switches between Graphviz layout engines: `Directed` enables [`dot`](https://graphviz.org/docs/layouts/dot/) and `Undirected` enables [`neato`](https://graphviz.org/docs/layouts/neato/)
Direction: `Vertical | Horizontal`
-
(Only if Layout = `Directed`). Switches between orienting the graph from top to bottom and from left to right.
+
*(Only if Layout = `Directed`)* Switches between orienting the graph from top to bottom and from left to right
- \ No newline at end of file + diff --git a/packages/frontend/src/help/logics/petri-net.mdx b/packages/frontend/src/help/logics/petri-net.mdx new file mode 100644 index 000000000..a126a1c1e --- /dev/null +++ b/packages/frontend/src/help/logics/petri-net.mdx @@ -0,0 +1,66 @@ +import { A } from "@solidjs/router"; +import { HelpAnalysisById } from "../logic_help_detail" + +## Description + +**Petri nets** were invented to express discrete-event dynamical systems with concurrency, or networks of resources and processes, describing how various states (called **places**) relate to one another in terms of **transitions**. +Each transition has incoming and outgoing arrows (called **arcs**) connected to places, which describe the **input** and **output** places of the transition (respectively). + +One way of understanding Petri nets is through their **token semantics**. +We place a number of "tokens" in each place and then check all the transitions to see if all their input places have a token: if so, then we can "fire" the transition, moving the tokens from the input places to the output places; if not, then nothing happens. +In many classical references the token semantics are built directly in to the definition of Petri nets (often called a **marking** or **tokening**), but an unmarked Petri net is already itself a useful formal gadget, analogous to how can be useful to consider an ordinary differential equation without giving its initial conditions. + + +### Further reading + +- Wikipedia: [Petri net](https://en.wikipedia.org/wiki/Petri_net) +- John C. Baez, Blake S. Pollard, "A Compositional Framework for Reaction Networks", (2017) [arXiv:1704.02051](https://arxiv.org/abs/1704.02051) +- Series of blog posts by John Baez on Petri nets, all listed + [here](https://johncarlosbaez.wordpress.com/?s=petri) + + +## Instances and analyses + +### Analyses + + + + +A **tokening** of a Petri net assigns some number of tokens to each place. 
A transition can **fire** by removing a token from each of its inputs and adding a token to each of its outputs. Given an initial tokening, we are concerned with all of the possible tokenings that can be arrived at via some sequence of transitions firing. This is usually infinite. Sometimes we care about a particular tokening as "forbidden": we want to confirm that no sequence of transitions firing could ever reach a state which includes the forbidden state. + +This analysis shows a ❌ if there does exist any reachable tokening from the given initial one which is greater than or equal to the given forbidden one, and shows a βœ… otherwise. + +*Note:* we are *not* checking that the forbidden state is reached exactly, just whether *some* state which is strictly larger than the forbidden state can be reached. + + + +Places are interpreted as quantities that can vary in time ("concentrations"), and the transitions ("reactions") in the model determine equations constraining the time derivatives of the stocks. +Each place is given an **initial value**. + +- A transition $A\xrightarrow{T}B$ between places $A$ and $B$ is interpreted as the equations $\{\dot{A}=-T,\dot{B}=T\}$, i.e. the transition $T$ goes out of $A$ and into $B$. + +- Equations governing places add linearly: in an arbitrary Petri net, the equation governing a place $A$ is $\dot{A}=\sum_{A_\text{in}}T_i - \sum_{A_\text{out}}T_j$, where $A_\text{in}$ (resp. $A_\text{out}$) is the set of all transitions whose target (resp. source) is $A$. + +- Transitions are determined by their **rate** coefficient $r$ and take into account only *incoming* arcs: a transition $A\xrightarrow{T}B$ imposes the equation $T=rA$. +However, equations governing transitions are *non-linear*: if a transition $T$ has two incoming arcs from places $A$ and $A'$, then $T=rAA'$. 
+ + +Like ordinary mass-action dynamics, stochastic mass-action dynamics for Petri nets interpret places as time-varying quantities that transfer mass through transitions. The difference in the *stochastic* analysis is that the time between reactions is determined by an exponential distribution whose rate is the total reaction rate. The reaction rate is just the sum of **propensity** for each reaction, which is calculated as the product of the quantity for each reactant. + +Let's consider the simple SIR model as an example: + +$[\mathrm{S}, \mathrm{I}]\xrightarrow{\text{infection}}[\mathrm{I},\mathrm{I}]$ + +$[\mathrm{I}]\xrightarrow{\text{recovery}}[\mathrm{R}]$ + +The infection rate is proportional to the product of the sizes of the susceptible and infected populations, and the recovery rate is proportional to the infected populations. The time between events is sampled from an exponential distribution whose rate is the reaction rate. The probability that a certain event will occur at the next step is determined by its rate. After a particular event is chosen, the mass of the reactants is updated appropriately. In our example, if infection takes place, the susceptible population decreases by one and the infected increases by one, since an infected person was "used" to produce two new infected persons. If a recovery takes place, then the infected and recovered populations decrease and increase by one, respectively. + +We can add an additional `death` process by either considering a new population "D" for "deceased," or by leaving the list in the domain empty: + +$[\mathrm{I}]\xrightarrow{\text{death}}[\phantom{I}]$ + +In this case, a death is interpreted as only decrementing the infected population. The empty list (a valid term in the model) has a valid interpretation in the analysis. + +We use the Gillespie algorithm from the [`Armavica/rebop`](https://github.com/Armavica/rebop) package. 
This process was first popularized by Gillespie in ["A General Method for Numerically Simulating the Stochastic Time Evolution of Coupled Chemical Reactions"](https://doi.org/10.1016/0021-9991(76)90041-3). + diff --git a/packages/frontend/src/model/document.ts b/packages/frontend/src/model/document.ts index 22b05ea1e..2c1358d03 100644 --- a/packages/frontend/src/model/document.ts +++ b/packages/frontend/src/model/document.ts @@ -1,17 +1,11 @@ -import type { AutomergeUrl, Repo } from "@automerge/automerge-repo"; -import { type Accessor, createMemo, createResource } from "solid-js"; +import type { Accessor } from "solid-js"; +import invariant from "tiny-invariant"; -import { - type DblModel, - type Document, - type ModelJudgment, - type ModelValidationResult, - currentVersion, - elaborateModel, -} from "catlog-wasm"; -import { type Api, type LiveDoc, getLiveDocFromDocHandle } from "../api"; -import { NotebookUtils, newNotebook } from "../notebook"; +import { type DblModel, type Document, type ModelJudgment, currentVersion } from "catlog-wasm"; +import type { Api, LiveDoc } from "../api"; +import { NotebookUtils, newNotebook } from "../notebook/types"; import type { Theory, TheoryLibrary } from "../theory"; +import type { ValidatedModel } from "./model_library"; /** A document defining a model. */ export type ModelDocument = Document & { type: "model" }; @@ -36,9 +30,6 @@ export type LiveModelDocument = { /** Live document with the model data. */ liveDoc: LiveDoc; - /** A memo of the formal content of the model. */ - formalJudgments: Accessor>; - /** A memo of the double theory that the model is of. */ theory: Accessor; @@ -49,84 +40,6 @@ export type LiveModelDocument = { validatedModel: Accessor; }; -/** A validated model as represented in `catlog`. */ -export type ValidatedModel = - /** A successfully elaborated and validated model. */ - | { - tag: "Valid"; - model: DblModel; - } - /** An elaborated model with one or more validation errors. 
*/ - | { - tag: "Invalid"; - model: DblModel; - errors: (ModelValidationResult & { tag: "Err" })["content"]; - } - /** A model that failed to even elaborate. */ - | { - tag: "Illformed"; - error: string; - }; - -function enlivenModelDocument( - liveDoc: LiveDoc, - theories: TheoryLibrary, -): LiveModelDocument { - const { doc } = liveDoc; - - // Memo-ize the *formal* content of the notebook, since most derived objects - // will not depend on the informal (rich-text) content in notebook. - const formalJudgments = createMemo>( - () => NotebookUtils.getFormalContent(doc.notebook), - [], - ); - - const [theory] = createResource( - () => doc.theory, - (theoryId) => theories.get(theoryId), - ); - - const elaboratedModel = (): DblModel | undefined => { - const validated = validatedModel(); - if (validated && validated.tag !== "Illformed") { - return validated.model; - } - }; - - const validatedModel = createMemo( - () => { - const th = theory(); - if (!th) { - // Abort immediately if the theory is undefined. - return undefined; - } - let model: DblModel; - try { - model = elaborateModel(formalJudgments(), th.theory); - } catch (e) { - return { tag: "Illformed", error: String(e) }; - } - const result = model.validate(); - if (result.tag === "Ok") { - return { tag: "Valid", model }; - } else { - return { tag: "Invalid", model, errors: result.content }; - } - }, - undefined, - { equals: false }, - ); - - return { - type: "model", - liveDoc, - formalJudgments, - theory, - elaboratedModel, - validatedModel, - }; -} - /** Create a new model in the backend. Returns the ref ID of the created document. @@ -144,43 +57,21 @@ export async function createModel( return api.createDoc(init); } -/** Retrieve a model from the backend and make it "live" for editing. 
*/ -export async function getLiveModel( - refId: string, - api: Api, - theories: TheoryLibrary, -): Promise { - const liveDoc = await api.getLiveDoc(refId, "model"); - return enlivenModelDocument(liveDoc, theories); -} - -/** Get a model from an Automerge repo and make it "live" for editing. - -Prefer [`getLiveModel`] unless you're bypassing the official CatColab backend. - */ -export async function getLiveModelFromRepo( - docId: AutomergeUrl, - repo: Repo, - theories: TheoryLibrary, -): Promise { - const docHandle = await repo.find(docId); - const liveDoc = getLiveDocFromDocHandle(docHandle); - return enlivenModelDocument(liveDoc, theories); -} - /** Migrate a model document from one theory to another. */ export async function migrateModelDocument( - liveDoc: LiveDoc, + liveModel: LiveModelDocument, targetTheoryId: string, theories: TheoryLibrary, ) { - const doc = liveDoc.doc; - const theory = await theories.get(doc.theory); + const { doc, changeDoc } = liveModel.liveDoc; const targetTheory = await theories.get(targetTheoryId); + const theory = liveModel.theory(); + let model = liveModel.elaboratedModel(); + invariant(theory && model); // FIXME: Should fail gracefully. // Trivial migration. if (!NotebookUtils.hasFormalCells(doc.notebook) || theory.inclusions.includes(targetTheoryId)) { - liveDoc.changeDoc((doc) => { + changeDoc((doc) => { doc.theory = targetTheoryId; }); return; @@ -194,9 +85,8 @@ export async function migrateModelDocument( // TODO: We need a general method to propagate changes from catlog models to // notebooks. This stop-gap solution only works because pushforward // migration doesn't have to create/delete cells, only update types. 
- let model = elaborateModel(NotebookUtils.getFormalContent(doc.notebook), theory.theory); model = migration.migrate(model, targetTheory.theory); - liveDoc.changeDoc((doc) => { + changeDoc((doc) => { doc.theory = targetTheoryId; for (const judgment of NotebookUtils.getFormalContent(doc.notebook)) { if (judgment.tag === "object") { diff --git a/packages/frontend/src/model/index.ts b/packages/frontend/src/model/index.ts index d1413d7c2..6c3915641 100644 --- a/packages/frontend/src/model/index.ts +++ b/packages/frontend/src/model/index.ts @@ -1,3 +1,4 @@ -export * from "./document"; export * from "./context"; +export * from "./document"; +export * from "./model_library"; export * from "./types"; diff --git a/packages/frontend/src/model/model_editor.tsx b/packages/frontend/src/model/model_editor.tsx index 7d0c634cb..cd4e1fe90 100644 --- a/packages/frontend/src/model/model_editor.tsx +++ b/packages/frontend/src/model/model_editor.tsx @@ -1,7 +1,7 @@ import { useParams } from "@solidjs/router"; import { getAuth } from "firebase/auth"; import { useAuth, useFirebaseApp } from "solid-firebase"; -import { Match, Show, Switch, createResource, createSignal, useContext } from "solid-js"; +import { Match, Show, Switch, createSignal, useContext } from "solid-js"; import invariant from "tiny-invariant"; import type { ModelJudgment } from "catlog-wasm"; @@ -12,7 +12,6 @@ import { type FormalCellEditorProps, NotebookEditor, NotebookUtils, - cellShortcutModifier, newFormalCell, } from "../notebook"; import { DocumentBreadcrumbs, DocumentLoadingScreen, Toolbar } from "../page"; @@ -22,7 +21,8 @@ import { type ModelTypeMeta, TheoryLibraryContext } from "../theory"; import { TheorySelectorDialog } from "../theory/theory_selector"; import { PermissionsButton } from "../user"; import { LiveModelContext } from "./context"; -import { type LiveModelDocument, getLiveModel, migrateModelDocument } from "./document"; +import { type LiveModelDocument, migrateModelDocument } from "./document"; 
+import { createModelLibraryWithApi } from "./model_library"; import { ModelMenu } from "./model_menu"; import { MorphismCellEditor } from "./morphism_cell_editor"; import { ObjectCellEditor } from "./object_cell_editor"; @@ -37,16 +37,14 @@ import { import "./model_editor.css"; export default function ModelPage() { + const params = useParams(); + const api = useApi(); const theories = useContext(TheoryLibraryContext); invariant(theories, "Must provide theory library as context to model page"); + const models = createModelLibraryWithApi(api, theories); - const params = useParams(); - - const [liveModel] = createResource( - () => params.ref, - (refId) => getLiveModel(refId, api, theories), - ); + const liveModel = models.useLiveModel(() => params.ref); return ( }> @@ -103,7 +101,7 @@ export function ModelPane(props: { migrateModelDocument(liveDoc(), id, stdTheories)} + setTheory={(id) => migrateModelDocument(props.liveModel, id, stdTheories)} theories={selectableTheories()} /> @@ -195,7 +193,7 @@ function modelCellConstructor(meta: ModelTypeMeta): CellConstructor models.destroy()); + +describe("Model in library", async () => { + const modelDoc = newModelDocument("empty"); + const docHandle = repo.create(modelDoc); + + const getEntry = await models.getElaboratedModel(docHandle.documentId); + const generation = () => getEntry()?.generation; + const status = () => getEntry()?.validatedModel.tag; + + test("should have generation number", () => { + assert.strictEqual(generation(), 1); + assert.strictEqual(models.size, 1); + }); + + test("should have instantiated theory", () => { + assert(getEntry()?.theory instanceof Theory); + }); + + test("should have elaborated and validated model", () => { + const validated = getEntry()?.validatedModel; + assert(validated?.tag === "Valid"); + assert(validated.model instanceof DblModel); + }); + + const changeDoc = async (f: ChangeFn) => { + docHandle.change(f); + // XXX: Change handler installed by `ModelLibrary` is async. 
+ await sleep(0); + }; + + // XXX: Pre-load the theory that we'll use. + await stdTheories.get("causal-loop"); + + test.sequential("should re-elaborate when theory changes", async () => { + await changeDoc((doc) => { + doc.theory = "causal-loop"; + }); + assert.strictEqual(generation(), 2); + assert.strictEqual(status(), "Valid"); + assert.strictEqual(models.size, 1); + }); + + test.sequential("should NOT re-elaborate when document name changes", async () => { + await changeDoc((doc) => { + doc.name = "My causal loop diagram"; + }); + assert.strictEqual(generation(), 2); + }); + + test.sequential("should re-elaborate when notebook cells are added", async () => { + await changeDoc((doc) => { + NotebookUtils.appendCell( + doc.notebook, + newFormalCell(newObjectDecl({ tag: "Basic", content: "Object" })), + ); + NotebookUtils.appendCell(doc.notebook, newRichTextCell()); + }); + assert.strictEqual(generation(), 3); + assert.strictEqual(status(), "Valid"); + }); + + test.sequential("should NOT re-elaborate when rich text is edited", async () => { + await changeDoc((doc) => { + const cellId = NotebookUtils.getCellIdByIndex(docHandle.doc().notebook, 1); + const cell = doc.notebook.cellContents[cellId]; + assert(cell?.tag === "rich-text"); + cell.content = "Some text"; + }); + assert.strictEqual(generation(), 3); + }); + + const anotherModelDoc = newModelDocument("causal-loop"); + NotebookUtils.appendCell( + anotherModelDoc.notebook, + newFormalCell(newObjectDecl({ tag: "Basic", content: "Object" })), + ); + const anotherDocHandle = repo.create(modelDoc); + + test.sequential("should automatically include instantiated models", async () => { + const inst = newInstantiatedModel({ + _id: anotherDocHandle.documentId, + _version: null, + _server: "", + type: "instantiation", + }); + await changeDoc((doc) => { + NotebookUtils.appendCell(doc.notebook, newFormalCell(inst)); + }); + assert.strictEqual(generation(), 4); + assert.strictEqual(status(), "Valid"); + 
assert.strictEqual(models.size, 2); + }); + + const cyclicModel = newModelDocument("empty"); + const cyclicModelHandle = repo.create(cyclicModel); + cyclicModelHandle.change((doc) => { + NotebookUtils.appendCell( + doc.notebook, + newFormalCell( + newInstantiatedModel({ + _id: cyclicModelHandle.documentId, + _version: null, + _server: "", + type: "instantiation", + }), + ), + ); + }); + + test("should gracefully fail to elaborate when it has a cycle", async () => { + const getEntry = await models.getElaboratedModel(cyclicModelHandle.documentId); + assert.strictEqual(getEntry()?.validatedModel.tag, "Illformed"); + }); +}); + +const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); diff --git a/packages/frontend/src/model/model_library.ts b/packages/frontend/src/model/model_library.ts new file mode 100644 index 000000000..a9e5f67af --- /dev/null +++ b/packages/frontend/src/model/model_library.ts @@ -0,0 +1,341 @@ +import { + type AnyDocumentId, + type DocHandle, + type DocHandleChangePayload, + type Patch, + type Repo, + interpretAsDocumentId, +} from "@automerge/automerge-repo"; +import { ReactiveMap } from "@solid-primitives/map"; +import { type Accessor, createResource, onCleanup } from "solid-js"; +import invariant from "tiny-invariant"; +import * as uuid from "uuid"; + +import { + type DblModel, + DblModelMap, + type DblTheory, + type ModelNotebook, + type ModelValidationResult, + type Uuid, + elaborateModel, +} from "catlog-wasm"; +import { type Api, type DocRef, type LiveDoc, findAndMigrate, makeLiveDoc } from "../api"; +import { NotebookUtils } from "../notebook/types"; +import type { Theory, TheoryLibrary } from "../theory"; +import type { LiveModelDocument, ModelDocument } from "./document"; + +/** An elaborated model along with its validation status. */ +export type ValidatedModel = + /** A successfully elaborated and validated model. 
*/ + | { + tag: "Valid"; + model: DblModel; + } + /** An elaborated model with one or more validation errors. */ + | { + tag: "Invalid"; + model: DblModel; + errors: (ModelValidationResult & { tag: "Err" })["content"]; + } + /** A model that failed to even elaborate. */ + | { + tag: "Illformed"; + error: string; + }; + +/** An entry in a `ModelLibrary`. */ +export type ModelEntry = { + /** The double theory that the model is a model of. */ + theory: Theory; + + /** The elaborated and validated model. */ + validatedModel: ValidatedModel; + + /** Generation number, incremented each time the model is elaborated. + + Mainly intended for debugging and testing purposes. + */ + generation: number; +}; + +type ModelLibraryParameters = { + canonicalize: (refId: RefId) => ModelKey; + fetch: (refId: RefId) => Promise>; + docRef?: (refId: RefId) => Promise; + theories: TheoryLibrary; +}; + +type ModelHandle = { + docHandle: DocHandle; + destroy: () => void; +}; + +type ModelKey = string & { __modelLibraryKey: true }; + +/** Create a `ModelLibrary` from an API object within a Solid component. */ +export function createModelLibraryWithApi(api: Api, theories: TheoryLibrary): ModelLibrary { + const library = ModelLibrary.withApi(api, theories); + onCleanup(() => library.destroy()); + return library; +} + +/** Create a `ModelLibrary` from an Automerge repo within a Solid component. */ +export function createModelLibraryWithRepo( + repo: Repo, + theories: TheoryLibrary, +): ModelLibrary { + const library = ModelLibrary.withRepo(repo, theories); + onCleanup(() => library.destroy()); + return library; +} + +/** A reactive library of models. + +Maintains a library of models, each associated with an Automerge document, +elaborating and validating a model when its associated document changes and +caching the result. Moreover, the cache is reactive when used in a Solid +reactive context. 
+ */ +export class ModelLibrary { + private entries: ReactiveMap; + private handles: Map; + private isElaborating: Set; + private params: ModelLibraryParameters; + + constructor(params: ModelLibraryParameters) { + this.entries = new ReactiveMap(); + this.handles = new Map(); + this.isElaborating = new Set(); + this.params = params; + } + + get size(): number { + return this.entries.size; + } + + static withApi(api: Api, theories: TheoryLibrary): ModelLibrary { + return new ModelLibrary({ + canonicalize(refId) { + invariant(uuid.validate(refId), () => `Ref ID is not a valid UUID: ${refId}`); + return refId as ModelKey; + }, + fetch(refId) { + return api.getDocHandle(refId, "model"); + }, + async docRef(refId) { + const permissions = await api.getPermissions(refId); + return { refId, permissions }; + }, + theories, + }); + } + + static withRepo(repo: Repo, theories: TheoryLibrary): ModelLibrary { + return new ModelLibrary({ + canonicalize(docId) { + return interpretAsDocumentId(docId) as string as ModelKey; + }, + fetch(docId) { + return findAndMigrate(repo, docId, "model"); + }, + theories, + }); + } + + /** Destroys the model library. + + Removes all cached document handles and associated event handlers. If you + create a model library in a component by calling `createModelLibrary`, this + method will be called automatically when the component unmounts. It is safe + to call this method multiple times. + */ + destroy() { + for (const handle of this.handles.values()) { + handle.destroy(); + } + this.handles.clear(); + this.entries.clear(); + } + + /** Adds a model to the library, if it has not already been added. 
*/ + private async addModel(refId: RefId) { + const key = this.params.canonicalize(refId); + if (this.entries.has(key)) { + return; + } + + const docHandle = await this.params.fetch(refId); + const [theory, validatedModel] = await this.elaborateAndValidate(key, docHandle.doc()); + + const onChange = (payload: DocHandleChangePayload) => + this.onChange(key, payload); + docHandle.on("change", onChange); + + this.handles.set(key, { + docHandle, + destroy() { + docHandle.off("change", onChange); + }, + }); + + this.entries.set(key, { + theory, + validatedModel, + generation: 1, + }); + } + + private async onChange(key: ModelKey, payload: DocHandleChangePayload) { + const doc = payload.doc; + if (payload.patches.some((patch) => isPatchToFormalContent(doc, patch))) { + const [theory, validatedModel] = await this.elaborateAndValidate(key, doc); + + const generation = (this.entries.get(key)?.generation ?? 0) + 1; + this.entries.set(key, { theory, validatedModel, generation }); + } + } + + /** Gets reactive accessor for elaborated model. */ + async getElaboratedModel(refId: RefId): Promise> { + await this.addModel(refId); + + const key = this.params.canonicalize(refId); + return () => this.entries.get(key); + } + + /** Gets "live" model containing a reactive model document. */ + async getLiveModel(refId: RefId): Promise { + await this.addModel(refId); + + const key = this.params.canonicalize(refId); + const docHandle = this.handles.get(key)?.docHandle; + invariant(docHandle); + + const docRef = this.params.docRef ? await this.params.docRef(refId) : undefined; + const liveDoc = makeLiveDoc(docHandle, docRef); + return makeLiveModel(liveDoc, () => this.entries.get(key)); + } + + /** Use elaborated model in a component. */ + useElaboratedModel(refId: () => RefId | undefined): Accessor { + const [resource] = createResource(refId, (refId) => this.getElaboratedModel(refId)); + return () => resource()?.(); + } + + /** Use "live" model in a component. 
*/ + useLiveModel(refId: () => RefId | undefined): Accessor { + const [liveModel] = createResource(refId, (refId) => this.getLiveModel(refId)); + return liveModel; + } + + // Outer method detects cycles to avoid looping infinitely. + private async elaborateAndValidate( + key: ModelKey, + doc: ModelDocument, + ): Promise<[Theory, ValidatedModel]> { + const theory = await this.params.theories.get(doc.theory); + + let validatedModel: ValidatedModel; + try { + if (this.isElaborating.has(key)) { + const error = "Model contains a cycle of instantiations"; + validatedModel = { tag: "Illformed", error }; + } else { + this.isElaborating.add(key); + validatedModel = await this._elaborateAndValidate(doc.notebook, theory.theory); + } + } finally { + this.isElaborating.delete(key); + } + + return [theory, validatedModel]; + } + + // Inner method actually elaborates. Do not call directly! + private async _elaborateAndValidate( + notebook: ModelNotebook, + theory: DblTheory, + ): Promise { + const instantiated = new DblModelMap(); + for (const cell of NotebookUtils.getFormalContent(notebook)) { + if (!(cell.tag === "instantiation" && cell.model)) { + continue; + } + const refId = cell.model._id; + if (instantiated.has(refId)) { + continue; + } + + await this.addModel(refId as RefId); + const entry = this.entries.get(this.params.canonicalize(refId as RefId)); + invariant(entry); + if (entry.validatedModel.tag === "Illformed") { + const error = `Instantiated model is ill-formed: ${entry.validatedModel.error}`; + return { tag: "Illformed", error }; + } + instantiated.set(refId, entry.validatedModel.model); + } + + return elaborateAndValidateModel(notebook, instantiated, theory); + } +} + +/** Elaborate and then validate a model notebook. 
*/ +function elaborateAndValidateModel( + notebook: ModelNotebook, + instantiated: DblModelMap, + theory: DblTheory, +): ValidatedModel { + let model: DblModel; + try { + model = elaborateModel(notebook, instantiated, theory); + } catch (e) { + return { tag: "Illformed", error: String(e) }; + } + const result = model.validate(); + if (result.tag === "Ok") { + return { tag: "Valid", model }; + } else { + return { tag: "Invalid", model, errors: result.content }; + } +} + +/** Does the patch to the model document affect its formal content? */ +function isPatchToFormalContent(doc: ModelDocument, patch: Patch): boolean { + const path = patch.path; + if (!(path[0] === "theory" || path[0] === "notebook")) { + // Ignore changes to top-level data like document name. + return false; + } + if (path[0] === "notebook" && path[1] === "cellContents" && path[2]) { + // Ignores changes to cells without formal content. + const cell = doc.notebook.cellContents[path[2]]; + if (cell?.tag !== "formal") { + return false; + } + // TODO: When only the human-readable labels are changed, update the + // id-label mappings but don't re-elaborate the model! 
+ } + return true; +} + +const makeLiveModel = ( + liveDoc: LiveDoc, + getEntry: Accessor, +): LiveModelDocument => ({ + type: "model", + liveDoc, + theory() { + return getEntry()?.theory; + }, + elaboratedModel() { + const entry = getEntry(); + if (entry && entry.validatedModel.tag !== "Illformed") { + return entry.validatedModel.model; + } + }, + validatedModel() { + return getEntry()?.validatedModel; + }, +}); diff --git a/packages/frontend/src/model/types.ts b/packages/frontend/src/model/types.ts index 6c15b9309..42f2b0fdd 100644 --- a/packages/frontend/src/model/types.ts +++ b/packages/frontend/src/model/types.ts @@ -1,6 +1,6 @@ import { v7 } from "uuid"; -import type { DblModel, ModelJudgment, MorType, ObType, QualifiedName } from "catlog-wasm"; +import type { DblModel, Link, ModelJudgment, MorType, ObType, QualifiedName } from "catlog-wasm"; import { deepCopyJSON } from "../util/deepcopy"; /** Declaration of an object in a model. */ @@ -31,6 +31,17 @@ export const newMorphismDecl = (morType: MorType): MorphismDecl => ({ cod: null, }); +/** Create a new instantiation of an existing model. */ +export const newInstantiatedModel = ( + model?: Link | null, +): ModelJudgment & { tag: "instantiation" } => ({ + tag: "instantiation", + id: v7(), + name: "", + model: model ?? null, + specializations: [], +}); + /** Duplicate a model judgment, creating a fresh UUID when applicable. 
*/ export const duplicateModelJudgment = (jgmt: ModelJudgment): ModelJudgment => ({ ...deepCopyJSON(jgmt), diff --git a/packages/frontend/src/notebook/notebook_editor.tsx b/packages/frontend/src/notebook/notebook_editor.tsx index 95fe0efc4..f06180847 100644 --- a/packages/frontend/src/notebook/notebook_editor.tsx +++ b/packages/frontend/src/notebook/notebook_editor.tsx @@ -1,7 +1,7 @@ import { getReorderDestinationIndex } from "@atlaskit/pragmatic-drag-and-drop-hitbox/util/get-reorder-destination-index"; import { monitorForElements } from "@atlaskit/pragmatic-drag-and-drop/element/adapter"; import type { DocHandle, Prop } from "@automerge/automerge-repo"; -import { type KbdKey, createShortcut } from "@solid-primitives/keyboard"; +import { makeEventListener } from "@solid-primitives/event-listener"; import ListPlus from "lucide-solid/icons/list-plus"; import { type Component, @@ -17,6 +17,7 @@ import invariant from "tiny-invariant"; import type { Cell, Notebook } from "catlog-wasm"; import { type Completion, IconButton } from "../components"; +import { type KbdKey, type ModifierKey, keyEventHasModifier } from "../util/keyboard"; import { type CellActions, type FormalCellEditorProps, @@ -137,7 +138,7 @@ export function NotebookEditor(props: { { name: "Text", description: "Start writing text", - shortcut: [cellShortcutModifier, "T"], + shortcut: ["T"], construct: () => newRichTextCell(), }, ...(props.cellConstructors ?? 
[]), @@ -149,21 +150,27 @@ export function NotebookEditor(props: { return { name, description, - shortcut, + shortcut: shortcut && [cellShortcutModifier, ...shortcut], onComplete: () => replaceCellWith(i, cc.construct()), }; }); - createEffect(() => { + makeEventListener(window, "keydown", (evt) => { if (props.noShortcuts) { return; } - for (const command of insertCommands()) { - if (command.shortcut) { - createShortcut(command.shortcut, () => command.onComplete?.()); + if (keyEventHasModifier(evt, cellShortcutModifier)) { + for (const command of insertCommands()) { + if (command.shortcut && evt.key.toUpperCase() === command.shortcut[0]) { + command.onComplete?.(); + return evt.preventDefault(); + } } } - createShortcut(["Shift", "Enter"], () => addAfterActiveCell(newStemCell())); + if (evt.shiftKey && evt.key === "Enter") { + addAfterActiveCell(newStemCell()); + return evt.preventDefault(); + } }); // Set up drag and drop for notebook cells. Each cell reports to the @@ -369,4 +376,4 @@ The choice is platform-specific: On Mac, the Alt/Option key remaps keys, so we use Control, whereas on other platforms Control tends to be already bound in other shortcuts, so we Alt. */ -export const cellShortcutModifier: KbdKey = navigator.userAgent.includes("Mac") ? "Control" : "Alt"; +const cellShortcutModifier: ModifierKey = navigator.userAgent.includes("Mac") ? 
"Control" : "Alt"; diff --git a/packages/frontend/src/stdlib/analyses/decapodes.tsx b/packages/frontend/src/stdlib/analyses/decapodes.tsx index 28284e43a..0735a601f 100644 --- a/packages/frontend/src/stdlib/analyses/decapodes.tsx +++ b/packages/frontend/src/stdlib/analyses/decapodes.tsx @@ -12,6 +12,7 @@ import { createNumericalColumn, } from "../../components"; import type { LiveDiagramDocument } from "../../diagram"; +import { NotebookUtils } from "../../notebook"; import { uniqueIndexArray } from "../../util/indexing"; import { PDEPlot2D, type PDEPlotData2D } from "../../visualization"; import { createJuliaKernel, executeAndRetrieve } from "./jupyter"; @@ -343,7 +344,7 @@ const makeSimulationData = ( diagram: validatedDiagram.diagram.judgments(), // FIXME: Depending on judgments from notebook means that model cannot // be composed of other models. - model: liveDiagram.liveModel.formalJudgments(), + model: NotebookUtils.getFormalContent(liveDiagram.liveModel.liveDoc.doc.notebook), domain, mesh, initialConditions, diff --git a/packages/frontend/src/stdlib/theories/petri-net.ts b/packages/frontend/src/stdlib/theories/petri-net.ts index 7ff395f64..fe6922e52 100644 --- a/packages/frontend/src/stdlib/theories/petri-net.ts +++ b/packages/frontend/src/stdlib/theories/petri-net.ts @@ -39,6 +39,7 @@ export default function createPetriNetTheory(theoryMeta: TheoryMeta): Theory { id: "diagram", name: "Visualization", description: "Visualize the Petri net", + help: "visualization", }), analyses.massAction({ simulate(model, data) { diff --git a/packages/frontend/src/theory/theory.ts b/packages/frontend/src/theory/theory.ts index 74391826f..a642b5d78 100644 --- a/packages/frontend/src/theory/theory.ts +++ b/packages/frontend/src/theory/theory.ts @@ -1,9 +1,8 @@ -import type { KbdKey } from "@solid-primitives/keyboard"; - import type { DblModel, DblTheory, MorType, ObOp, ObType } from "catlog-wasm"; import { MorTypeIndex, ObTypeIndex } from "catlog-wasm"; import type { 
DiagramAnalysisComponent, ModelAnalysisComponent } from "../analysis"; import { uniqueIndexArray } from "../util/indexing"; +import type { KbdKey } from "../util/keyboard"; import type { ArrowStyle } from "../visualization"; /** A double theory configured for the frontend. diff --git a/packages/frontend/src/util/keyboard.ts b/packages/frontend/src/util/keyboard.ts new file mode 100644 index 000000000..0f44b3807 --- /dev/null +++ b/packages/frontend/src/util/keyboard.ts @@ -0,0 +1,26 @@ +/** Utilities for keyboard events and shortcuts. + +The types `ModifierKey` and `KbdKey` are borrowed from +`@solid-primitives/keyboard`, a package that we're no longer using. + +@module + */ + +export type ModifierKey = "Alt" | "Control" | "Meta" | "Shift"; +export type KbdKey = ModifierKey | (string & {}); + +/** Returns whether the modifier key is active in the keyboard event. */ +export function keyEventHasModifier(evt: KeyboardEvent, key: ModifierKey): boolean { + switch (key) { + case "Alt": + return evt.altKey; + case "Control": + return evt.ctrlKey; + case "Meta": + return evt.metaKey; + case "Shift": + return evt.shiftKey; + default: + throw new Error(`Key is not a modifier: ${key}`); + } +} diff --git a/packages/frontend/src/util/test_util.ts b/packages/frontend/src/util/test_util.ts index b86174401..fa53104a0 100644 --- a/packages/frontend/src/util/test_util.ts +++ b/packages/frontend/src/util/test_util.ts @@ -1,3 +1,5 @@ +import type { JsonValue } from "catcolab-api"; +import type { Document } from "catlog-wasm"; import { FirebaseError } from "firebase/app"; import { type Auth, @@ -17,3 +19,16 @@ export async function initTestUserAuth(auth: Auth, email: string, password: stri } } } + +/** Creates a valid test document with the given name. 
*/ +export function createTestDocument(name: string): JsonValue { + const doc: Document = { + type: "model", + name, + theory: "empty", + notebook: { cellOrder: [], cellContents: {} }, + version: "1", + }; + + return doc as unknown as JsonValue; +} diff --git a/packages/notebook-types/src/v0/api.rs b/packages/notebook-types/src/v0/api.rs index 9d3d97e6d..769045106 100644 --- a/packages/notebook-types/src/v0/api.rs +++ b/packages/notebook-types/src/v0/api.rs @@ -1,5 +1,3 @@ -use uuid::Uuid; - use serde::{Deserialize, Serialize}; use tsify::Tsify; @@ -8,20 +6,22 @@ use tsify::Tsify; /// Such a reference identifies a specific document, possibly at a specific /// version. The keys are prefixed with an underscore, e.g. `_id` instead of /// `id`, to avoid conflicts with other keys and unambiguously signal that the -/// ID and other data apply at the *database* level, rather than merely the -/// *document* level. The same convention is used in document databases like -/// CouchDB and MongoDB. +/// data occur at the *database* level, rather than merely the *document* level. +/// The same convention is used in document databases like CouchDB and MongoDB. #[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Tsify)] #[tsify(into_wasm_abi, from_wasm_abi)] #[tsify(missing_as_null)] pub struct StableRef { /// Unique identifier of the referenced document. + /// + /// When using CatColab's official backend, this should be a document ref ID + /// (a UUID). #[serde(rename = "_id")] - pub id: Uuid, + pub id: String, /// Version of the document. /// - /// If null, refers to the head snapshot of document. This is the case when + /// If null, refers to the head commit of document. This is the case when /// the referenced document will receive live updates. #[serde(rename = "_version")] pub version: Option, @@ -44,6 +44,19 @@ pub struct Link { #[serde(flatten)] pub stable_ref: StableRef, - /// Type of the link, such as "diagramIn" or "analysisOf". 
- pub r#type: String, + pub r#type: LinkType, +} + +/// Type of link between documents. +#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Tsify)] +#[tsify(into_wasm_abi, from_wasm_abi)] +pub enum LinkType { + #[serde(rename = "analysis-of")] + AnalysisOf, + + #[serde(rename = "diagram-in")] + DiagramIn, + + #[serde(rename = "instantiation")] + Instantiation, } diff --git a/packages/notebook-types/src/v0/diagram_judgment.rs b/packages/notebook-types/src/v0/diagram_judgment.rs index e8bbf5e69..b98746a64 100644 --- a/packages/notebook-types/src/v0/diagram_judgment.rs +++ b/packages/notebook-types/src/v0/diagram_judgment.rs @@ -9,7 +9,7 @@ use super::theory::{MorType, ObType}; #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] #[tsify(into_wasm_abi, from_wasm_abi, missing_as_null)] pub struct DiagramObDecl { - /// Human-readable name of object. + /// Human-readable label for object. pub name: String, /// Globally unique identifier of object. @@ -27,7 +27,7 @@ pub struct DiagramObDecl { #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] #[tsify(into_wasm_abi, from_wasm_abi, missing_as_null)] pub struct DiagramMorDecl { - /// Human-readable name of morphism. + /// Human-readable label for morphism. pub name: String, /// Globally unique identifier of morphism. @@ -47,13 +47,16 @@ pub struct DiagramMorDecl { pub cod: Option, } -/// A judgment in the definition of a diagram in a model. +/// A judgment defining part of a diagram in a model. #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] #[serde(tag = "tag")] #[tsify(into_wasm_abi, from_wasm_abi)] pub enum DiagramJudgment { + /// Declares a generating object in the diagram. #[serde(rename = "object")] Object(DiagramObDecl), + + /// Declares a generating morphism in the diagram. 
#[serde(rename = "morphism")] Morphism(DiagramMorDecl), } diff --git a/packages/notebook-types/src/v0/model_judgment.rs b/packages/notebook-types/src/v0/model_judgment.rs index 0a2b631cd..552bbf5f7 100644 --- a/packages/notebook-types/src/v0/model_judgment.rs +++ b/packages/notebook-types/src/v0/model_judgment.rs @@ -2,6 +2,7 @@ use serde::{Deserialize, Serialize}; use tsify::Tsify; use uuid::Uuid; +use super::api::Link; use super::model::Ob; use super::theory::{MorType, ObType}; @@ -9,7 +10,7 @@ use super::theory::{MorType, ObType}; #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] #[tsify(into_wasm_abi, from_wasm_abi, missing_as_null)] pub struct ObDecl { - /// Human-readable name of object. + /// Human-readable label for object. pub name: String, /// Globally unique identifier of object. @@ -24,7 +25,7 @@ pub struct ObDecl { #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] #[tsify(into_wasm_abi, from_wasm_abi, missing_as_null)] pub struct MorDecl { - /// Human-readable name of morphism. + /// Human-readable label for morphism. pub name: String, /// Globally unique identifier of morphism. @@ -41,12 +42,48 @@ pub struct MorDecl { pub cod: Option, } +/// Instantiates an existing model into the current model. +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] +#[tsify(into_wasm_abi, from_wasm_abi, missing_as_null)] +pub struct InstantiatedModel { + /// Human-readable label for the instantiation. + pub name: String, + + /// Globally unique identifier of the instantiation. + pub id: Uuid, + + /// Link to the model to instantiate. + pub model: Option, + + /// List of specializations to perform on the instantiated model. + pub specializations: Vec, +} + +/// A specialization of a generating object in an instantiated model. +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] +#[tsify(into_wasm_abi, from_wasm_abi, missing_as_null)] +pub struct SpecializeModel { + /// ID (qualified name) of generating object to specialize.
+ pub id: Option, + + /// Object to insert as the specialization. + pub ob: Option, +} + +/// A judgment defining part of a model of a double theory. #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Tsify)] #[serde(tag = "tag")] #[tsify(into_wasm_abi, from_wasm_abi)] pub enum ModelJudgment { + /// Declares a generating object of the model. #[serde(rename = "object")] Object(ObDecl), + + /// Declares a generating morphism of the model. #[serde(rename = "morphism")] Morphism(MorDecl), + + /// Instantiates an existing model into this model. + #[serde(rename = "instantiation")] + Instantiation(InstantiatedModel), } diff --git a/packages/notebook-types/src/v1/document.rs b/packages/notebook-types/src/v1/document.rs index ba0f2bfeb..86f67e294 100644 --- a/packages/notebook-types/src/v1/document.rs +++ b/packages/notebook-types/src/v1/document.rs @@ -3,8 +3,6 @@ use crate::v0::AnalysisType; use super::analysis::Analysis; use super::api::Link; -use super::diagram_judgment::DiagramJudgment; -use super::model_judgment::ModelJudgment; use super::notebook::Notebook; use serde::{Deserialize, Serialize}; @@ -17,7 +15,7 @@ use tsify::Tsify; pub struct ModelDocumentContent { pub name: String, pub theory: String, - pub notebook: Notebook, + pub notebook: Notebook, pub version: String, } @@ -27,7 +25,7 @@ pub struct DiagramDocumentContent { pub name: String, #[serde(rename = "diagramIn")] pub diagram_in: Link, - pub notebook: Notebook, + pub notebook: Notebook, pub version: String, } diff --git a/packages/notebook-types/src/v1/notebook.rs b/packages/notebook-types/src/v1/notebook.rs index 8d00b4ec1..b469f2b20 100644 --- a/packages/notebook-types/src/v1/notebook.rs +++ b/packages/notebook-types/src/v1/notebook.rs @@ -16,11 +16,26 @@ pub struct Notebook { pub cell_order: Vec, } +#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Tsify)] +#[tsify(into_wasm_abi, from_wasm_abi)] +pub struct ModelNotebook(pub Notebook); + +#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, 
Tsify)] +#[tsify(into_wasm_abi, from_wasm_abi)] +pub struct DiagramNotebook(pub Notebook); + impl Notebook { pub fn cells(&self) -> impl Iterator> { self.cell_order.iter().filter_map(|id| self.cell_contents.get(id)) } + pub fn formal_content(&self) -> impl Iterator { + self.cells().filter_map(|cell| match cell { + NotebookCell::Formal { content, .. } => Some(content), + _ => None, + }) + } + pub fn migrate_from_v0(old: v0::Notebook) -> Self { let mut cell_contents = HashMap::new(); let mut cell_order = Vec::new(); diff --git a/packages/patchwork/src/analysis_pane.tsx b/packages/patchwork/src/analysis_pane.tsx index 724ff13a5..a38542cd9 100644 --- a/packages/patchwork/src/analysis_pane.tsx +++ b/packages/patchwork/src/analysis_pane.tsx @@ -3,14 +3,17 @@ import { createResource, Switch, Match } from "solid-js"; import { getLiveAnalysisFromRepo } from "../../frontend/src/analysis"; import { AnalysisNotebookEditor } from "../../frontend/src/analysis/analysis_editor"; +import { createModelLibraryWithRepo } from "../../frontend/src/model"; import { TheoryLibraryContext } from "../../frontend/src/theory"; import { stdTheories } from "../../frontend/src/stdlib"; import type { SolidToolProps } from "./tools"; export function AnalysisPaneComponent(props: SolidToolProps) { + const models = createModelLibraryWithRepo(props.repo, stdTheories); + const [liveAnalysis] = createResource( () => props.docUrl, - (docUrl) => getLiveAnalysisFromRepo(docUrl as AutomergeUrl, props.repo, stdTheories), + (docUrl) => getLiveAnalysisFromRepo(docUrl as AutomergeUrl, props.repo, models), ); return ( diff --git a/packages/patchwork/src/model_pane.tsx b/packages/patchwork/src/model_pane.tsx index fea91f2e5..ca04ed38a 100644 --- a/packages/patchwork/src/model_pane.tsx +++ b/packages/patchwork/src/model_pane.tsx @@ -1,18 +1,20 @@ -import type { AutomergeUrl } from "@automerge/automerge-repo"; +import type { AnyDocumentId } from "@automerge/automerge-repo"; import { createResource, Switch, Match } 
from "solid-js"; -import { getLiveModelFromRepo } from "../../frontend/src/model"; +import { createModelLibraryWithRepo } from "../../frontend/src/model"; import { ModelPane } from "../../frontend/src/model/model_editor"; import { TheoryLibraryContext } from "../../frontend/src/theory"; import { stdTheories } from "../../frontend/src/stdlib"; import type { SolidToolProps } from "./tools"; export function ModelPaneComponent(props: SolidToolProps) { + const models = createModelLibraryWithRepo(props.repo, stdTheories); + const [liveModel] = createResource( () => props.docUrl, async (docUrl) => { try { - return await getLiveModelFromRepo(docUrl as AutomergeUrl, props.repo, stdTheories); + return await models.getLiveModel(docUrl as AnyDocumentId); } catch (error) { console.error("=== Model Loading Failed ==="); console.error("Error:", error);