diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b889b87
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,42 @@
+# IDE / Editor
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# OS
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+ehthumbs.db
+Thumbs.db
+desktop.ini
+
+# Logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# Environment variables
+.env
+.env.local
+.env.*.local
+
+# Build outputs
+dist/
+build/
+*.tsbuildinfo
+
+# Test coverage
+coverage/
+.nyc_output/
+
+# Temporary files
+*.tmp
+*.temp
+.cache/
+
diff --git a/deno.json b/deno.json
index 54feb3b..220b2b0 100644
--- a/deno.json
+++ b/deno.json
@@ -4,14 +4,14 @@
"license": "Apache",
"exports": "./src/main/main.ts",
"imports": {
- "@actions/github": "npm:@actions/github@^6.0.0",
- "@octokit/core": "npm:@octokit/core@^6.1.4",
- "@std/assert": "jsr:@std/assert@^1.0.13",
- "@std/cli": "jsr:@std/cli@^1.0.17",
- "@std/fs": "jsr:@std/fs@^1.0.17",
- "@std/path": "jsr:@std/path@^1.0.9",
- "@std/testing": "jsr:@std/testing@^1.0.11",
- "@types/node": "npm:@types/node@^22.10.2",
+ "@actions/github": "npm:@actions/github@^6.0.1",
+ "@octokit/core": "npm:@octokit/core@^6.1.6",
+ "@std/assert": "jsr:@std/assert@^1.0.16",
+ "@std/cli": "jsr:@std/cli@^1.0.25",
+ "@std/fs": "jsr:@std/fs@^1.0.21",
+ "@std/path": "jsr:@std/path@^1.1.4",
+ "@std/testing": "jsr:@std/testing@^1.0.16",
+ "@types/node": "npm:@types/node@^22.19.3",
"xml2js": "npm:xml2js@^0.6.2"
},
"tasks": {
diff --git a/deno.lock b/deno.lock
index 61fccfd..198b37a 100644
--- a/deno.lock
+++ b/deno.lock
@@ -1,68 +1,76 @@
{
- "version": "4",
+ "version": "5",
"specifiers": {
- "jsr:@std/assert@^1.0.12": "1.0.13",
- "jsr:@std/assert@^1.0.13": "1.0.13",
- "jsr:@std/cli@^1.0.17": "1.0.17",
- "jsr:@std/data-structures@^1.0.6": "1.0.7",
- "jsr:@std/fs@^1.0.16": "1.0.17",
- "jsr:@std/fs@^1.0.17": "1.0.17",
- "jsr:@std/internal@^1.0.6": "1.0.6",
- "jsr:@std/path@^1.0.8": "1.0.9",
- "jsr:@std/path@^1.0.9": "1.0.9",
- "jsr:@std/testing@^1.0.11": "1.0.11",
- "npm:@actions/github@*": "6.0.0_@octokit+core@5.2.0",
- "npm:@actions/github@6": "6.0.0_@octokit+core@5.2.0",
- "npm:@octokit/core@*": "6.1.4",
- "npm:@octokit/core@^6.1.4": "6.1.4",
+ "jsr:@std/assert@^1.0.15": "1.0.16",
+ "jsr:@std/assert@^1.0.16": "1.0.16",
+ "jsr:@std/cli@^1.0.25": "1.0.25",
+ "jsr:@std/data-structures@^1.0.9": "1.0.9",
+ "jsr:@std/fs@^1.0.19": "1.0.21",
+ "jsr:@std/fs@^1.0.21": "1.0.21",
+ "jsr:@std/internal@^1.0.12": "1.0.12",
+ "jsr:@std/path@^1.1.2": "1.1.4",
+ "jsr:@std/path@^1.1.4": "1.1.4",
+ "jsr:@std/testing@^1.0.16": "1.0.16",
+ "npm:@actions/github@^6.0.1": "6.0.1_@octokit+core@5.2.2",
+ "npm:@octokit/core@^6.1.6": "6.1.6",
"npm:@types/node@*": "22.5.4",
- "npm:@types/node@^22.10.2": "22.10.2",
+ "npm:@types/node@^22.19.3": "22.19.3",
"npm:xml2js@~0.6.2": "0.6.2"
},
"jsr": {
- "@std/assert@1.0.13": {
- "integrity": "ae0d31e41919b12c656c742b22522c32fb26ed0cba32975cb0de2a273cb68b29",
+ "@std/assert@1.0.16": {
+ "integrity": "6a7272ed1eaa77defe76e5ff63ca705d9c495077e2d5fd0126d2b53fc5bd6532",
"dependencies": [
"jsr:@std/internal"
]
},
- "@std/cli@1.0.17": {
- "integrity": "e15b9abe629e17be90cc6216327f03a29eae613365f1353837fa749aad29ce7b"
+ "@std/cli@1.0.25": {
+ "integrity": "1f85051b370c97a7a9dfc6ba626e7ed57a91bea8c081597276d1e78d929d8c91",
+ "dependencies": [
+ "jsr:@std/internal"
+ ]
},
- "@std/data-structures@1.0.7": {
- "integrity": "16932d2c8d281f65eaaa2209af2473209881e33b1ced54cd1b015e7b4cdbb0d2"
+ "@std/data-structures@1.0.9": {
+ "integrity": "033d6e17e64bf1f84a614e647c1b015fa2576ae3312305821e1a4cb20674bb4d"
},
- "@std/fs@1.0.17": {
- "integrity": "1c00c632677c1158988ef7a004cb16137f870aafdb8163b9dce86ec652f3952b",
+ "@std/fs@1.0.21": {
+ "integrity": "d720fe1056d78d43065a4d6e0eeb2b19f34adb8a0bc7caf3a4dbf1d4178252cd",
"dependencies": [
- "jsr:@std/path@^1.0.9"
+ "jsr:@std/internal",
+ "jsr:@std/path@^1.1.4"
]
},
- "@std/internal@1.0.6": {
- "integrity": "9533b128f230f73bd209408bb07a4b12f8d4255ab2a4d22a1fd6d87304aca9a4"
+ "@std/internal@1.0.12": {
+ "integrity": "972a634fd5bc34b242024402972cd5143eac68d8dffaca5eaa4dba30ce17b027"
},
- "@std/path@1.0.9": {
- "integrity": "260a49f11edd3db93dd38350bf9cd1b4d1366afa98e81b86167b4e3dd750129e"
+ "@std/path@1.1.4": {
+ "integrity": "1d2d43f39efb1b42f0b1882a25486647cb851481862dc7313390b2bb044314b5",
+ "dependencies": [
+ "jsr:@std/internal"
+ ]
},
- "@std/testing@1.0.11": {
- "integrity": "12b3db12d34f0f385a26248933bde766c0f8c5ad8b6ab34d4d38f528ab852f48",
+ "@std/testing@1.0.16": {
+ "integrity": "a917ffdeb5924c9be436dc78bc32e511760e14d3a96e49c607fc5ecca86d0092",
"dependencies": [
- "jsr:@std/assert@^1.0.12",
+ "jsr:@std/assert@^1.0.15",
"jsr:@std/data-structures",
- "jsr:@std/fs@^1.0.16",
+ "jsr:@std/fs@^1.0.19",
"jsr:@std/internal",
- "jsr:@std/path@^1.0.8"
+ "jsr:@std/path@^1.1.2"
]
}
},
"npm": {
- "@actions/github@6.0.0_@octokit+core@5.2.0": {
- "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==",
+ "@actions/github@6.0.1_@octokit+core@5.2.2": {
+ "integrity": "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==",
"dependencies": [
"@actions/http-client",
- "@octokit/core@5.2.0",
+ "@octokit/core@5.2.2",
"@octokit/plugin-paginate-rest",
- "@octokit/plugin-rest-endpoint-methods"
+ "@octokit/plugin-rest-endpoint-methods",
+ "@octokit/request@8.4.1",
+ "@octokit/request-error@5.1.1",
+ "undici"
]
},
"@actions/http-client@2.2.3": {
@@ -81,41 +89,41 @@
"@octokit/auth-token@5.1.2": {
"integrity": "sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw=="
},
- "@octokit/core@5.2.0": {
- "integrity": "sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg==",
+ "@octokit/core@5.2.2": {
+ "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
"dependencies": [
"@octokit/auth-token@4.0.0",
"@octokit/graphql@7.1.1",
"@octokit/request@8.4.1",
"@octokit/request-error@5.1.1",
- "@octokit/types@13.8.0",
+ "@octokit/types@13.10.0",
"before-after-hook@2.2.3",
"universal-user-agent@6.0.1"
]
},
- "@octokit/core@6.1.4": {
- "integrity": "sha512-lAS9k7d6I0MPN+gb9bKDt7X8SdxknYqAMh44S5L+lNqIN2NuV8nvv3g8rPp7MuRxcOpxpUIATWprO0C34a8Qmg==",
+ "@octokit/core@6.1.6": {
+ "integrity": "sha512-kIU8SLQkYWGp3pVKiYzA5OSaNF5EE03P/R8zEmmrG6XwOg5oBjXyQVVIauQ0dgau4zYhpZEhJrvIYt6oM+zZZA==",
"dependencies": [
"@octokit/auth-token@5.1.2",
- "@octokit/graphql@8.2.1",
- "@octokit/request@9.2.2",
- "@octokit/request-error@6.1.7",
- "@octokit/types@13.8.0",
+ "@octokit/graphql@8.2.2",
+ "@octokit/request@9.2.4",
+ "@octokit/request-error@6.1.8",
+ "@octokit/types@14.1.0",
"before-after-hook@3.0.2",
- "universal-user-agent@7.0.2"
+ "universal-user-agent@7.0.3"
]
},
- "@octokit/endpoint@10.1.3": {
- "integrity": "sha512-nBRBMpKPhQUxCsQQeW+rCJ/OPSMcj3g0nfHn01zGYZXuNDvvXudF/TYY6APj5THlurerpFN4a/dQAIAaM6BYhA==",
+ "@octokit/endpoint@10.1.4": {
+ "integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
"dependencies": [
- "@octokit/types@13.8.0",
- "universal-user-agent@7.0.2"
+ "@octokit/types@14.1.0",
+ "universal-user-agent@7.0.3"
]
},
"@octokit/endpoint@9.0.6": {
"integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
"dependencies": [
- "@octokit/types@13.8.0",
+ "@octokit/types@13.10.0",
"universal-user-agent@6.0.1"
]
},
@@ -123,50 +131,53 @@
"integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
"dependencies": [
"@octokit/request@8.4.1",
- "@octokit/types@13.8.0",
+ "@octokit/types@13.10.0",
"universal-user-agent@6.0.1"
]
},
- "@octokit/graphql@8.2.1": {
- "integrity": "sha512-n57hXtOoHrhwTWdvhVkdJHdhTv0JstjDbDRhJfwIRNfFqmSo1DaK/mD2syoNUoLCyqSjBpGAKOG0BuwF392slw==",
+ "@octokit/graphql@8.2.2": {
+ "integrity": "sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==",
"dependencies": [
- "@octokit/request@9.2.2",
- "@octokit/types@13.8.0",
- "universal-user-agent@7.0.2"
+ "@octokit/request@9.2.4",
+ "@octokit/types@14.1.0",
+ "universal-user-agent@7.0.3"
]
},
"@octokit/openapi-types@20.0.0": {
"integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="
},
- "@octokit/openapi-types@23.0.1": {
- "integrity": "sha512-izFjMJ1sir0jn0ldEKhZ7xegCTj/ObmEDlEfpFrx4k/JyZSMRHbO3/rBwgE7f3m2DHt+RrNGIVw4wSmwnm3t/g=="
+ "@octokit/openapi-types@24.2.0": {
+ "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="
+ },
+ "@octokit/openapi-types@25.1.0": {
+ "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="
},
- "@octokit/plugin-paginate-rest@9.2.2_@octokit+core@5.2.0": {
+ "@octokit/plugin-paginate-rest@9.2.2_@octokit+core@5.2.2": {
"integrity": "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==",
"dependencies": [
- "@octokit/core@5.2.0",
+ "@octokit/core@5.2.2",
"@octokit/types@12.6.0"
]
},
- "@octokit/plugin-rest-endpoint-methods@10.4.1_@octokit+core@5.2.0": {
+ "@octokit/plugin-rest-endpoint-methods@10.4.1_@octokit+core@5.2.2": {
"integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==",
"dependencies": [
- "@octokit/core@5.2.0",
+ "@octokit/core@5.2.2",
"@octokit/types@12.6.0"
]
},
"@octokit/request-error@5.1.1": {
"integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==",
"dependencies": [
- "@octokit/types@13.8.0",
+ "@octokit/types@13.10.0",
"deprecation",
"once"
]
},
- "@octokit/request-error@6.1.7": {
- "integrity": "sha512-69NIppAwaauwZv6aOzb+VVLwt+0havz9GT5YplkeJv7fG7a40qpLt/yZKyiDxAhgz0EtgNdNcb96Z0u+Zyuy2g==",
+ "@octokit/request-error@6.1.8": {
+ "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==",
"dependencies": [
- "@octokit/types@13.8.0"
+ "@octokit/types@14.1.0"
]
},
"@octokit/request@8.4.1": {
@@ -174,18 +185,18 @@
"dependencies": [
"@octokit/endpoint@9.0.6",
"@octokit/request-error@5.1.1",
- "@octokit/types@13.8.0",
+ "@octokit/types@13.10.0",
"universal-user-agent@6.0.1"
]
},
- "@octokit/request@9.2.2": {
- "integrity": "sha512-dZl0ZHx6gOQGcffgm1/Sf6JfEpmh34v3Af2Uci02vzUYz6qEN6zepoRtmybWXIGXFIK8K9ylE3b+duCWqhArtg==",
+ "@octokit/request@9.2.4": {
+ "integrity": "sha512-q8ybdytBmxa6KogWlNa818r0k1wlqzNC+yNkcQDECHvQo8Vmstrg18JwqJHdJdUiHD2sjlwBgSm9kHkOKe2iyA==",
"dependencies": [
- "@octokit/endpoint@10.1.3",
- "@octokit/request-error@6.1.7",
- "@octokit/types@13.8.0",
+ "@octokit/endpoint@10.1.4",
+ "@octokit/request-error@6.1.8",
+ "@octokit/types@14.1.0",
"fast-content-type-parse",
- "universal-user-agent@7.0.2"
+ "universal-user-agent@7.0.3"
]
},
"@octokit/types@12.6.0": {
@@ -194,16 +205,22 @@
"@octokit/openapi-types@20.0.0"
]
},
- "@octokit/types@13.8.0": {
- "integrity": "sha512-x7DjTIbEpEWXK99DMd01QfWy0hd5h4EN+Q7shkdKds3otGQP+oWE/y0A76i1OvH9fygo4ddvNf7ZvF0t78P98A==",
+ "@octokit/types@13.10.0": {
+ "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+ "dependencies": [
+ "@octokit/openapi-types@24.2.0"
+ ]
+ },
+ "@octokit/types@14.1.0": {
+ "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
"dependencies": [
- "@octokit/openapi-types@23.0.1"
+ "@octokit/openapi-types@25.1.0"
]
},
- "@types/node@22.10.2": {
- "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==",
+ "@types/node@22.19.3": {
+ "integrity": "sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==",
"dependencies": [
- "undici-types@6.20.0"
+ "undici-types@6.21.0"
]
},
"@types/node@22.5.4": {
@@ -239,11 +256,11 @@
"undici-types@6.19.8": {
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="
},
- "undici-types@6.20.0": {
- "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="
+ "undici-types@6.21.0": {
+ "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="
},
- "undici@5.28.4": {
- "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
+ "undici@5.29.0": {
+ "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
"dependencies": [
"@fastify/busboy"
]
@@ -251,8 +268,8 @@
"universal-user-agent@6.0.1": {
"integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="
},
- "universal-user-agent@7.0.2": {
- "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
+ "universal-user-agent@7.0.3": {
+ "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="
},
"wrappy@1.0.2": {
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
@@ -556,14 +573,14 @@
},
"workspace": {
"dependencies": [
- "jsr:@std/assert@^1.0.13",
- "jsr:@std/cli@^1.0.17",
- "jsr:@std/fs@^1.0.17",
- "jsr:@std/path@^1.0.9",
- "jsr:@std/testing@^1.0.11",
- "npm:@actions/github@6",
- "npm:@octokit/core@^6.1.4",
- "npm:@types/node@^22.10.2",
+ "jsr:@std/assert@^1.0.16",
+ "jsr:@std/cli@^1.0.25",
+ "jsr:@std/fs@^1.0.21",
+ "jsr:@std/path@^1.1.4",
+ "jsr:@std/testing@^1.0.16",
+ "npm:@actions/github@^6.0.1",
+ "npm:@octokit/core@^6.1.6",
+ "npm:@types/node@^22.19.3",
"npm:xml2js@~0.6.2"
]
}
diff --git a/src/main/constants.ts b/src/main/constants.ts
new file mode 100644
index 0000000..4595a0a
--- /dev/null
+++ b/src/main/constants.ts
@@ -0,0 +1,17 @@
+/**
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
diff --git a/src/main/flow_comparator.ts b/src/main/flow_comparator.ts
index f025251..455f3d8 100644
--- a/src/main/flow_comparator.ts
+++ b/src/main/flow_comparator.ts
@@ -18,7 +18,7 @@
* @fileoverview Utility functions to compare two flows.
*/
-import { ParsedFlow } from "./flow_parser.ts";
+import type { ParsedFlow } from "./flow_parser.ts";
import * as flowTypes from "./flow_types.ts";
const OBJECT = "object";
diff --git a/src/main/flow_file_change_detector.ts b/src/main/flow_file_change_detector.ts
index b139eb4..66df6d4 100644
--- a/src/main/flow_file_change_detector.ts
+++ b/src/main/flow_file_change_detector.ts
@@ -20,13 +20,13 @@
*/
import { Configuration } from "./argument_processor.ts";
+import { EOL } from "./constants.ts";
const ADDED = "A";
const MODIFIED = "M";
const RENAMED = "R";
const COPIED = "C";
const SUPPORTED_DIFF_TYPES = [ADDED, MODIFIED, RENAMED, COPIED].join("");
-const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
/** The extension of flow files. */
export const FLOW_FILE_EXTENSION = ".flow-meta.xml";
diff --git a/src/main/flow_parser.ts b/src/main/flow_parser.ts
index c173b94..d84d3b2 100644
--- a/src/main/flow_parser.ts
+++ b/src/main/flow_parser.ts
@@ -20,7 +20,7 @@
*/
import { Parser } from "xml2js";
-import * as flowTypes from "./flow_types.ts";
+import type * as flowTypes from "./flow_types.ts";
const FAULT = "Fault";
const END = "End";
diff --git a/src/main/flow_to_uml_transformer.ts b/src/main/flow_to_uml_transformer.ts
index 0b6b41f..6a85b18 100644
--- a/src/main/flow_to_uml_transformer.ts
+++ b/src/main/flow_to_uml_transformer.ts
@@ -22,9 +22,9 @@
import { join } from "@std/path";
import { Configuration } from "./argument_processor.ts";
import { compareFlows } from "./flow_comparator.ts";
-import { FlowFileChangeDetector } from "./flow_file_change_detector.ts";
-import { FlowParser, ParsedFlow } from "./flow_parser.ts";
-import { UmlGeneratorContext } from "./uml_generator_context.ts";
+import type { FlowFileChangeDetector } from "./flow_file_change_detector.ts";
+import { FlowParser, type ParsedFlow } from "./flow_parser.ts";
+import type { UmlGeneratorContext } from "./uml_generator_context.ts";
import { XmlReader } from "./xml_reader.ts";
/**
diff --git a/src/main/graphviz_generator.ts b/src/main/graphviz_generator.ts
index 3601441..55405dc 100644
--- a/src/main/graphviz_generator.ts
+++ b/src/main/graphviz_generator.ts
@@ -20,6 +20,7 @@
import type { Transition } from "./flow_parser.ts";
import * as flowTypes from "./flow_types.ts";
+import { EOL } from "./constants.ts";
import {
type DiagramNode,
Icon as UmlIcon,
@@ -28,7 +29,6 @@ import {
UmlGenerator,
} from "./uml_generator.ts";
-const EOL = "\n";
const TABLE_BEGIN = `<
diff --git a/src/main/main.ts b/src/main/main.ts
index ed57599..c255cbe 100644
--- a/src/main/main.ts
+++ b/src/main/main.ts
@@ -21,7 +21,7 @@
import { Configuration } from "./argument_processor.ts";
import { FlowFileChangeDetector } from "./flow_file_change_detector.ts";
import {
- FlowDifference,
+ type FlowDifference,
FlowToUmlTransformer,
} from "./flow_to_uml_transformer.ts";
import { UmlGeneratorContext } from "./uml_generator_context.ts";
diff --git a/src/main/plantuml_generator.ts b/src/main/plantuml_generator.ts
index 5f323f5..fa8dfae 100644
--- a/src/main/plantuml_generator.ts
+++ b/src/main/plantuml_generator.ts
@@ -25,7 +25,7 @@ import {
Icon as UmlIcon,
UmlGenerator,
} from "./uml_generator.ts";
-const EOL = "\n";
+import { EOL } from "./constants.ts";
enum SkinColor {
NONE = "",
diff --git a/src/main/uml_generator.ts b/src/main/uml_generator.ts
index d3ea1df..2f153d0 100644
--- a/src/main/uml_generator.ts
+++ b/src/main/uml_generator.ts
@@ -19,10 +19,9 @@
* generate a UML string representation of a Salesforce flow.
*/
-import { ParsedFlow, Transition } from "./flow_parser.ts";
+import type { ParsedFlow, Transition } from "./flow_parser.ts";
import * as flowTypes from "./flow_types.ts";
-
-const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
+import { EOL } from "./constants.ts";
/**
* The skin color of the node.
@@ -215,7 +214,9 @@ export abstract class UmlGenerator {
start.filters.forEach((filter, index) => {
entryCriteria.push(
`${index + 1}. ${filter.field} ${filter.operator} ${
- toString(filter.value)
+ toString(
+ filter.value,
+ )
}`,
);
});
diff --git a/src/main/uml_generator_context.ts b/src/main/uml_generator_context.ts
index a78cd61..cbd05bf 100644
--- a/src/main/uml_generator_context.ts
+++ b/src/main/uml_generator_context.ts
@@ -22,11 +22,11 @@
*/
import { DiagramTool } from "./argument_processor.ts";
-import { ParsedFlow } from "./flow_parser.ts";
+import type { ParsedFlow } from "./flow_parser.ts";
import { GraphVizGenerator } from "./graphviz_generator.ts";
import { MermaidGenerator } from "./mermaid_generator.ts";
import { PlantUmlGenerator } from "./plantuml_generator.ts";
-import { UmlGenerator } from "./uml_generator.ts";
+import type { UmlGenerator } from "./uml_generator.ts";
/**
* This class is responsible for generating a UML diagram representation of a
diff --git a/src/main/uml_writer.ts b/src/main/uml_writer.ts
index 1553ae4..6d79c20 100644
--- a/src/main/uml_writer.ts
+++ b/src/main/uml_writer.ts
@@ -19,11 +19,14 @@
* the generated UML diagrams to a file.
*/
import { join } from "@std/path";
-import { Configuration, Mode, RuntimeConfig } from "./argument_processor.ts";
-import { FlowDifference } from "./flow_to_uml_transformer.ts";
+import {
+ Configuration,
+ Mode,
+ type RuntimeConfig,
+} from "./argument_processor.ts";
+import type { FlowDifference } from "./flow_to_uml_transformer.ts";
import { GithubClient } from "./github_client.ts";
-
-const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
+import { EOL } from "./constants.ts";
const FILE_EXTENSION = ".json";
const HIDDEN_COMMENT_PREFIX = "";
@@ -104,10 +107,7 @@ export class UmlWriter {
for (const [filePath, flowDifference] of this.filePathToFlowDifference) {
const flowApiName = this.extractFlowApiName(filePath);
- const outputPath = join(
- config.outputDirectory!,
- `${flowApiName}.md`,
- );
+ const outputPath = join(config.outputDirectory!, `${flowApiName}.md`);
let markdownContent = "";
const tripleBackticks = "```";
diff --git a/src/test/argument_processor_test.ts b/src/test/argument_processor_test.ts
index 2cf5db0..262c9dc 100644
--- a/src/test/argument_processor_test.ts
+++ b/src/test/argument_processor_test.ts
@@ -19,10 +19,10 @@ import {
DiagramTool,
ERROR_MESSAGES,
Mode,
- RuntimeConfig,
+ type RuntimeConfig,
} from "../main/argument_processor.ts";
import { assertEquals, assertThrows } from "@std/assert";
-import { getTestConfig } from "./test_utils.ts";
+import { getTestConfig } from "./utilities/mock_config.ts";
const INVALID_DIAGRAM_TOOL = "unsupported";
const INVALID_FILE_PATH = "invalid/file/path/which/does/not/exist";
@@ -130,23 +130,20 @@ Deno.test("ArgumentProcessor", async (t) => {
},
);
- await t.step(
- "should reject markdown mode without outputDirectory",
- () => {
- assertThrows(
- () => {
- const { argumentProcessor } = setupTest((config) => {
- config.mode = Mode.MARKDOWN;
- config.diagramTool = DiagramTool.MERMAID;
- config.outputDirectory = undefined;
- });
- argumentProcessor.getConfig();
- },
- Error,
- ERROR_MESSAGES.outputDirectoryRequired,
- );
- },
- );
+ await t.step("should reject markdown mode without outputDirectory", () => {
+ assertThrows(
+ () => {
+ const { argumentProcessor } = setupTest((config) => {
+ config.mode = Mode.MARKDOWN;
+ config.diagramTool = DiagramTool.MERMAID;
+ config.outputDirectory = undefined;
+ });
+ argumentProcessor.getConfig();
+ },
+ Error,
+ ERROR_MESSAGES.outputDirectoryRequired,
+ );
+ });
await t.step(
"should reject markdown mode with non-existent outputDirectory",
diff --git a/src/test/flow_comparator_test.ts b/src/test/flow_comparator_test.ts
index bd2e356..00e355d 100644
--- a/src/test/flow_comparator_test.ts
+++ b/src/test/flow_comparator_test.ts
@@ -17,7 +17,7 @@
import { assertEquals } from "@std/assert";
import { compareFlows } from "../main/flow_comparator.ts";
-import { ParsedFlow } from "../main/flow_parser.ts";
+import type { ParsedFlow } from "../main/flow_parser.ts";
import * as flowTypes from "../main/flow_types.ts";
const NODE = {
diff --git a/src/test/flow_file_change_detector_test.ts b/src/test/flow_file_change_detector_test.ts
index 9fe1767..1c6957d 100644
--- a/src/test/flow_file_change_detector_test.ts
+++ b/src/test/flow_file_change_detector_test.ts
@@ -16,42 +16,59 @@
import { assertEquals, assertThrows } from "@std/assert";
import { Configuration } from "../main/argument_processor.ts";
-import { getTestConfig } from "./test_utils.ts";
+import { getTestConfig } from "./utilities/mock_config.ts";
import {
ERROR_MESSAGES,
FLOW_FILE_EXTENSION,
FlowFileChangeDetector,
} from "../main/flow_file_change_detector.ts";
+import { EOL } from "../main/constants.ts";
-const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
const FLOW_FILE_PATH = "file2" + FLOW_FILE_EXTENSION;
-Deno.test("FlowFileChangeDetector", async (t) => {
- let detector: FlowFileChangeDetector;
-
- // Function to set up the mock implementations
- const setupMocks = () => {
- // tslint:disable:no-any
- (detector as any).executeVersionCommand = () => undefined;
- (detector as any).executeRevParseCommand = () => undefined;
- (detector as any).executeDiffCommand = () =>
- new TextEncoder().encode(
- ["file1.txt", FLOW_FILE_PATH, "file3.js"].join(EOL),
- );
- (detector as any).executeGetFileContentCommand = () =>
- new TextEncoder().encode("file content");
- // tslint:enable:no-any
+/**
+ * Test helper type that exposes private methods for mocking purposes.
+ * Uses Omit to remove private methods and re-adds them as public.
+ */
+type TestableFlowFileChangeDetector =
+ & Omit<
+ FlowFileChangeDetector,
+ | "executeVersionCommand"
+ | "executeRevParseCommand"
+ | "executeDiffCommand"
+ | "executeGetFileContentCommand"
+ >
+ & {
+ executeVersionCommand: () => void;
+ executeRevParseCommand: () => void;
+ executeDiffCommand: () => Uint8Array;
+ executeGetFileContentCommand: (
+ filePath: string,
+ commitHash: string,
+ ) => Uint8Array;
};
- await t.step("beforeEach setup", async () => {
- Configuration.getInstance = () => getTestConfig();
- detector = new FlowFileChangeDetector();
- setupMocks(); // Initial setup
- });
+function createDetector(): TestableFlowFileChangeDetector {
+ const detector =
+ new FlowFileChangeDetector() as unknown as TestableFlowFileChangeDetector;
+ detector.executeVersionCommand = () => undefined;
+ detector.executeRevParseCommand = () => undefined;
+ detector.executeDiffCommand = () =>
+ new TextEncoder().encode(
+ ["file1.txt", FLOW_FILE_PATH, "file3.js"].join(EOL),
+ );
+ detector.executeGetFileContentCommand = () =>
+ new TextEncoder().encode("file content");
+ return detector;
+}
+
+Configuration.getInstance = () => getTestConfig();
+Deno.test("FlowFileChangeDetector", async (t) => {
await t.step(
"should get flow files when git is installed and in a repo",
() => {
+ const detector = createDetector();
const flowFiles = detector.getFlowFiles();
assertEquals(flowFiles, [FLOW_FILE_PATH]);
@@ -59,9 +76,8 @@ Deno.test("FlowFileChangeDetector", async (t) => {
);
await t.step("should throw error if git is not installed", () => {
- setupMocks(); // Reset mocks before this test
- // tslint:disable-next-line:no-any
- (detector as any).executeVersionCommand = () => {
+ const detector = createDetector();
+ detector.executeVersionCommand = () => {
throw new Error(ERROR_MESSAGES.gitIsNotInstalledError);
};
@@ -73,9 +89,8 @@ Deno.test("FlowFileChangeDetector", async (t) => {
});
await t.step("should throw error if not in a git repo", () => {
- setupMocks(); // Reset mocks before this test
- // tslint:disable-next-line:no-any
- (detector as any).executeRevParseCommand = () => {
+ const detector = createDetector();
+ detector.executeRevParseCommand = () => {
throw new Error(ERROR_MESSAGES.notInGitRepoError);
};
@@ -87,9 +102,8 @@ Deno.test("FlowFileChangeDetector", async (t) => {
});
await t.step("should throw error if git diff fails", () => {
- setupMocks(); // Reset mocks before this test
- // tslint:disable-next-line:no-any
- (detector as any).executeDiffCommand = () => {
+ const detector = createDetector();
+ detector.executeDiffCommand = () => {
throw new Error("Diff error");
};
@@ -101,23 +115,22 @@ Deno.test("FlowFileChangeDetector", async (t) => {
});
await t.step("should get file content from old version", () => {
- setupMocks(); // Reset mocks before this test
+ const detector = createDetector();
const fileContent = detector.getFileContent(FLOW_FILE_PATH, "old");
assertEquals(fileContent, "file content");
});
await t.step("should get file content from new version", () => {
- setupMocks(); // Reset mocks before this test
+ const detector = createDetector();
const fileContent = detector.getFileContent(FLOW_FILE_PATH, "new");
assertEquals(fileContent, "file content");
});
await t.step("should throw error if unable to get file content", () => {
- setupMocks(); // Reset mocks before this test
- // tslint:disable-next-line:no-any
- (detector as any).executeGetFileContentCommand = () => {
+ const detector = createDetector();
+ detector.executeGetFileContentCommand = () => {
throw new Error("Get file content error");
};
diff --git a/src/test/flow_parser_test.ts b/src/test/flow_parser_test.ts
index 75e006d..e91e1fc 100644
--- a/src/test/flow_parser_test.ts
+++ b/src/test/flow_parser_test.ts
@@ -15,8 +15,12 @@
*/
import { join } from "@std/path";
-import { ERROR_MESSAGES, FlowParser, ParsedFlow } from "../main/flow_parser.ts";
-import * as flowTypes from "../main/flow_types.ts";
+import {
+ ERROR_MESSAGES,
+ FlowParser,
+ type ParsedFlow,
+} from "../main/flow_parser.ts";
+import type * as flowTypes from "../main/flow_types.ts";
import { assert, assertEquals, assertRejects } from "@std/assert";
const GOLDENS_PATH = "./src/test/goldens";
diff --git a/src/test/flow_to_uml_transformer_test.ts b/src/test/flow_to_uml_transformer_test.ts
index bbabf5b..fc9f799 100644
--- a/src/test/flow_to_uml_transformer_test.ts
+++ b/src/test/flow_to_uml_transformer_test.ts
@@ -21,11 +21,11 @@ import {
assertStringIncludes,
} from "@std/assert";
import { Configuration, DiagramTool } from "../main/argument_processor.ts";
-import { getTestConfig } from "./test_utils.ts";
+import { getTestConfig } from "./utilities/mock_config.ts";
import { FlowFileChangeDetector } from "../main/flow_file_change_detector.ts";
import {
ERROR_MESSAGES,
- FlowDifference,
+ type FlowDifference,
FlowToUmlTransformer,
} from "../main/flow_to_uml_transformer.ts";
import { UmlGeneratorContext } from "../main/uml_generator_context.ts";
diff --git a/src/test/github_client_test.ts b/src/test/github_client_test.ts
index e566d3c..60dfbc9 100644
--- a/src/test/github_client_test.ts
+++ b/src/test/github_client_test.ts
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-import { GithubClient, GithubComment } from "../main/github_client.ts";
+import { GithubClient, type GithubComment } from "../main/github_client.ts";
import { assertEquals, assertRejects } from "@std/assert";
import { ERROR_MESSAGES } from "../main/github_client.ts";
@@ -101,6 +101,7 @@ const mockContext = {
job: "mock-job",
runNumber: 1,
runId: 1,
+ runAttempt: 1,
action: "mock-action",
apiUrl: "https://api.github.com",
serverUrl: "https://github.com",
@@ -126,6 +127,7 @@ const invalidContext = {
job: "mock-job",
runNumber: 1,
runId: 1,
+ runAttempt: 1,
action: "mock-action",
apiUrl: "https://api.github.com",
serverUrl: "https://github.com",
diff --git a/src/test/graphviz_generator_test.ts b/src/test/graphviz_generator_test.ts
index 764aa14..7e42b9e 100644
--- a/src/test/graphviz_generator_test.ts
+++ b/src/test/graphviz_generator_test.ts
@@ -15,7 +15,7 @@
*/
import { assertEquals, assertStringIncludes } from "@std/assert";
-import { ParsedFlow } from "../main/flow_parser.ts";
+import type { ParsedFlow } from "../main/flow_parser.ts";
import * as flowTypes from "../main/flow_types.ts";
import {
FontColor,
@@ -24,154 +24,12 @@ import {
SkinColor,
} from "../main/graphviz_generator.ts";
import {
- DiagramNode,
+ type DiagramNode,
Icon as UmlIcon,
SkinColor as UmlSkinColor,
} from "../main/uml_generator.ts";
-
-const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
-const NODE_NAMES = {
- start: "FLOW_START",
- apexPluginCall: "myApexPluginCall",
- assignment: "myAssignment",
- collectionProcessor: "myCollectionProcessor",
- decision: "myDecision",
- loop: "myLoop",
- orchestratedStage: "myOrchestratedStage",
- recordCreate: "myRecordCreate",
- recordDelete: "myRecordDelete",
- recordLookup: "myRecordLookup",
- recordRollback: "myRecordRollback",
- recordUpdate: "myRecordUpdate",
- screen: "myScreen",
- stageSteps: ["step1", "step2", "step3"],
- step: "myStep",
- subflow: "mySubflow",
- transform: "myTransform",
- wait: "myWait",
- actionCall: "myActionCall",
-};
-
-function generateMockFlow(): ParsedFlow {
- return {
- start: {
- name: NODE_NAMES.start,
- } as flowTypes.FlowStart,
- apexPluginCalls: getFlowNodes(
- NODE_NAMES.apexPluginCall,
- ) as flowTypes.FlowApexPluginCall[],
- assignments: getFlowNodes(
- NODE_NAMES.assignment,
- ) as flowTypes.FlowAssignment[],
- collectionProcessors: getFlowNodes(
- NODE_NAMES.collectionProcessor,
- ) as flowTypes.FlowCollectionProcessor[],
- decisions: [
- generateDecision(NODE_NAMES.decision),
- ] as flowTypes.FlowDecision[],
- loops: getFlowNodes(NODE_NAMES.loop) as flowTypes.FlowLoop[],
- orchestratedStages: [
- generateStage(NODE_NAMES.orchestratedStage, NODE_NAMES.stageSteps),
- ],
- recordCreates: getFlowNodes(
- NODE_NAMES.recordCreate,
- ) as flowTypes.FlowRecordCreate[],
- recordDeletes: getFlowNodes(
- NODE_NAMES.recordDelete,
- ) as flowTypes.FlowRecordDelete[],
- recordLookups: getFlowNodes(
- NODE_NAMES.recordLookup,
- ) as flowTypes.FlowRecordLookup[],
- recordRollbacks: getFlowNodes(
- NODE_NAMES.recordRollback,
- ) as flowTypes.FlowRecordRollback[],
- recordUpdates: getFlowNodes(
- NODE_NAMES.recordUpdate,
- ) as flowTypes.FlowRecordUpdate[],
- screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
- steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
- subflows: getFlowNodes(NODE_NAMES.subflow) as flowTypes.FlowSubflow[],
- transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
- waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
- actionCalls: getFlowNodes(
- NODE_NAMES.actionCall,
- ) as flowTypes.FlowActionCall[],
- transitions: [
- {
- from: NODE_NAMES.start,
- to: NODE_NAMES.apexPluginCall,
- fault: false,
- },
- {
- from: NODE_NAMES.apexPluginCall,
- to: NODE_NAMES.assignment,
- fault: false,
- },
- {
- from: NODE_NAMES.assignment,
- to: NODE_NAMES.collectionProcessor,
- fault: false,
- },
- ],
- };
-}
-
-function getFlowNodes(name: string): flowTypes.FlowNode[] {
- return [{ name: `${name}`, label: `${name}` }] as flowTypes.FlowNode[];
-}
-
-function generateStage(
- name: string,
- stepNames: string[],
-): flowTypes.FlowOrchestratedStage {
- return {
- name: `${name}`,
- label: `${name}`,
- elementSubtype: "OrchestratedStage",
- locationX: 0,
- locationY: 0,
- description: `${name}`,
- stageSteps: stepNames.map((stepName) => ({
- name: `${stepName}`,
- label: `${stepName}`,
- elementSubtype: "Step",
- locationX: 0,
- locationY: 0,
- description: `${stepName}`,
- actionName: `${stepName}Action`,
- actionType: flowTypes.FlowStageStepActionType.STEP_BACKGROUND,
- })),
- } as flowTypes.FlowOrchestratedStage;
-}
-
-function generateDecision(name: string): flowTypes.FlowDecision {
- return {
- name: `${name}`,
- label: `${name}`,
- elementSubtype: "Decision",
- locationX: 0,
- locationY: 0,
- description: `${name}`,
- rules: [
- {
- name: `${name}Rule`,
- label: `${name}Rule`,
- description: `${name}Rule`,
- conditionLogic: "and",
- conditions: [
- {
- leftValueReference: "foo",
- operator: flowTypes.FlowComparisonOperator.EQUAL_TO,
- rightValue: {
- booleanValue: "true",
- },
- processMetadataValues: [],
- },
- ],
- },
- ],
- } as flowTypes.FlowDecision;
-}
+import { generateMockFlow } from "./utilities/mock_flow.ts";
+import { EOL } from "../main/constants.ts";
function generateTable(
nodeName: string,
@@ -221,15 +79,10 @@ function generateInnerNodeCells(cells: string[]) {
}
Deno.test("GraphVizGenerator", async (t) => {
- let systemUnderTest: GraphVizGenerator;
- let mockedFlow: ParsedFlow;
+ const mockedFlow = generateMockFlow();
+ const systemUnderTest = new GraphVizGenerator(mockedFlow);
let result: string;
- await t.step("Setup", () => {
- mockedFlow = generateMockFlow();
- systemUnderTest = new GraphVizGenerator(mockedFlow);
- });
-
await t.step("should generate header", () => {
const label = "foo";
result = systemUnderTest.getHeader(label);
diff --git a/src/test/main_test.ts b/src/test/main_test.ts
index f3def6a..c9d5416 100644
--- a/src/test/main_test.ts
+++ b/src/test/main_test.ts
@@ -20,7 +20,7 @@ import {
Configuration,
DiagramTool,
Mode,
- RuntimeConfig,
+ type RuntimeConfig,
} from "../main/argument_processor.ts";
import { Runner } from "../main/main.ts";
diff --git a/src/test/mermaid_generator_test.ts b/src/test/mermaid_generator_test.ts
index 2727949..bde191e 100644
--- a/src/test/mermaid_generator_test.ts
+++ b/src/test/mermaid_generator_test.ts
@@ -15,195 +15,22 @@
*/
import { assertEquals, assertStringIncludes } from "@std/assert";
-import { ParsedFlow, Transition } from "../main/flow_parser.ts";
+import type { ParsedFlow, Transition } from "../main/flow_parser.ts";
import * as flowTypes from "../main/flow_types.ts";
import { MermaidGenerator } from "../main/mermaid_generator.ts";
import {
- DiagramNode,
+ type DiagramNode,
Icon as UmlIcon,
SkinColor as UmlSkinColor,
} from "../main/uml_generator.ts";
-
-const NODE_NAMES = {
- start: "FLOW_START",
- apexPluginCall: "myApexPluginCall",
- assignment: "myAssignment",
- collectionProcessor: "myCollectionProcessor",
- decision: "myDecision",
- loop: "myLoop",
- orchestratedStage: "myOrchestratedStage",
- recordCreate: "myRecordCreate",
- recordDelete: "myRecordDelete",
- recordLookup: "myRecordLookup",
- recordRollback: "myRecordRollback",
- recordUpdate: "myRecordUpdate",
- screen: "myScreen",
- stageSteps: ["step1", "step2", "step3"],
- step: "myStep",
- subflow: "mySubflow",
- transform: "myTransform",
- wait: "myWait",
- actionCall: "myActionCall",
-};
-
-function generateMockFlow(): ParsedFlow {
- return {
- label: "Test Flow",
- start: {
- name: NODE_NAMES.start,
- } as flowTypes.FlowStart,
- apexPluginCalls: getFlowNodes(
- NODE_NAMES.apexPluginCall,
- ) as flowTypes.FlowApexPluginCall[],
- assignments: getFlowNodes(
- NODE_NAMES.assignment,
- ) as flowTypes.FlowAssignment[],
- collectionProcessors: getFlowNodes(
- NODE_NAMES.collectionProcessor,
- ) as flowTypes.FlowCollectionProcessor[],
- decisions: [
- generateDecision(NODE_NAMES.decision),
- ] as flowTypes.FlowDecision[],
- loops: getFlowNodes(NODE_NAMES.loop) as flowTypes.FlowLoop[],
- orchestratedStages: [
- generateStage(NODE_NAMES.orchestratedStage, NODE_NAMES.stageSteps),
- ],
- recordCreates: getFlowNodes(
- NODE_NAMES.recordCreate,
- ) as flowTypes.FlowRecordCreate[],
- recordDeletes: getFlowNodes(
- NODE_NAMES.recordDelete,
- ) as flowTypes.FlowRecordDelete[],
- recordLookups: getFlowNodes(
- NODE_NAMES.recordLookup,
- ) as flowTypes.FlowRecordLookup[],
- recordRollbacks: getFlowNodes(
- NODE_NAMES.recordRollback,
- ) as flowTypes.FlowRecordRollback[],
- recordUpdates: getFlowNodes(
- NODE_NAMES.recordUpdate,
- ) as flowTypes.FlowRecordUpdate[],
- screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
- steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
- subflows: getFlowNodes(NODE_NAMES.subflow) as flowTypes.FlowSubflow[],
- transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
- waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
- actionCalls: getFlowNodes(
- NODE_NAMES.actionCall,
- ) as flowTypes.FlowActionCall[],
- transitions: [
- {
- from: NODE_NAMES.start,
- to: NODE_NAMES.apexPluginCall,
- fault: false,
- },
- {
- from: NODE_NAMES.apexPluginCall,
- to: NODE_NAMES.assignment,
- fault: false,
- label: "Normal Transition",
- },
- {
- from: NODE_NAMES.assignment,
- to: NODE_NAMES.decision,
- fault: true,
- label: "Error Path",
- },
- ],
- };
-}
-
-function getFlowNodes(name: string): flowTypes.FlowNode[] {
- return [{ name: `${name}`, label: `${name}` }] as flowTypes.FlowNode[];
-}
-
-function generateStage(
- name: string,
- stepNames: string[],
-): flowTypes.FlowOrchestratedStage {
- return {
- name: `${name}`,
- label: `${name}`,
- elementSubtype: "OrchestratedStage",
- locationX: 0,
- locationY: 0,
- description: `${name}`,
- stageSteps: stepNames.map((stepName) => ({
- name: `${stepName}`,
- label: `${stepName}`,
- elementSubtype: "Step",
- locationX: 0,
- locationY: 0,
- description: `${stepName}`,
- actionName: `${stepName}Action`,
- actionType: flowTypes.FlowStageStepActionType.STEP_BACKGROUND,
- })),
- } as flowTypes.FlowOrchestratedStage;
-}
-
-function generateDecision(name: string): flowTypes.FlowDecision {
- return {
- name: name,
- label: name,
- elementSubtype: "Decision",
- locationX: 0,
- locationY: 0,
- description: name,
- rules: [
- {
- name: `${name}_rule1`,
- label: "Rule 1",
- description: "Rule 1 description",
- conditions: [
- {
- leftValueReference: "leftValue",
- operator: "EqualTo",
- rightValue: {
- stringValue: "rightValue",
- },
- processMetadataValues: [],
- },
- ],
- },
- {
- name: `${name}_rule2`,
- label: "Rule 2",
- description: "Rule 2 description",
- conditions: [
- {
- leftValueReference: "anotherLeftValue",
- operator: "NotEqualTo",
- rightValue: {
- stringValue: "anotherRightValue",
- },
- processMetadataValues: [],
- },
- {
- leftValueReference: "thirdLeftValue",
- operator: "GreaterThan",
- rightValue: {
- numberValue: "10",
- },
- processMetadataValues: [],
- },
- ],
- conditionLogic: "1 AND 2",
- },
- ],
- } as flowTypes.FlowDecision;
-}
+import { generateMockFlow } from "./utilities/mock_flow.ts";
// @ts-ignore: Deno types
Deno.test("MermaidGenerator", async (t) => {
- let systemUnderTest: MermaidGenerator;
- let mockedFlow: ParsedFlow;
+ const mockedFlow = generateMockFlow();
+ const systemUnderTest = new MermaidGenerator(mockedFlow);
let result: string;
- await t.step("Setup", () => {
- mockedFlow = generateMockFlow();
- systemUnderTest = new MermaidGenerator(mockedFlow);
- });
-
await t.step("should generate header", () => {
const label = "Test Flow";
result = systemUnderTest.getHeader(label);
diff --git a/src/test/plantuml_generator_test.ts b/src/test/plantuml_generator_test.ts
index 068399c..1e86628 100644
--- a/src/test/plantuml_generator_test.ts
+++ b/src/test/plantuml_generator_test.ts
@@ -15,7 +15,6 @@
*/
import { assertEquals, assertStringIncludes } from "@std/assert";
-import type { ParsedFlow } from "../main/flow_parser.ts";
import * as flowTypes from "../main/flow_types.ts";
import { PlantUmlGenerator } from "../main/plantuml_generator.ts";
import {
@@ -23,129 +22,13 @@ import {
Icon as UmlIcon,
SkinColor as UmlSkinColor,
} from "../main/uml_generator.ts";
-
-const NODE_NAMES = {
- start: "FLOW_START",
- apexPluginCall: "myApexPluginCall",
- assignment: "myAssignment",
- collectionProcessor: "myCollectionProcessor",
- decision: "myDecision",
- loop: "myLoop",
- orchestratedStage: "myOrchestratedStage",
- recordCreate: "myRecordCreate",
- recordDelete: "myRecordDelete",
- recordLookup: "myRecordLookup",
- recordRollback: "myRecordRollback",
- recordUpdate: "myRecordUpdate",
- screen: "myScreen",
- stageSteps: ["step1", "step2", "step3"],
- step: "myStep",
- subflow: "mySubflow",
- transform: "myTransform",
- wait: "myWait",
- actionCall: "myActionCall",
-};
-
-function generateMockFlow(): ParsedFlow {
- return {
- start: {
- name: NODE_NAMES.start,
- } as flowTypes.FlowStart,
- apexPluginCalls: getFlowNodes(
- NODE_NAMES.apexPluginCall,
- ) as flowTypes.FlowApexPluginCall[],
- assignments: getFlowNodes(
- NODE_NAMES.assignment,
- ) as flowTypes.FlowAssignment[],
- collectionProcessors: getFlowNodes(
- NODE_NAMES.collectionProcessor,
- ) as flowTypes.FlowCollectionProcessor[],
- decisions: getFlowNodes(NODE_NAMES.decision) as flowTypes.FlowDecision[],
- loops: getFlowNodes(NODE_NAMES.loop) as flowTypes.FlowLoop[],
- orchestratedStages: [
- generateStage(NODE_NAMES.orchestratedStage, NODE_NAMES.stageSteps),
- ],
- recordCreates: getFlowNodes(
- NODE_NAMES.recordCreate,
- ) as flowTypes.FlowRecordCreate[],
- recordDeletes: getFlowNodes(
- NODE_NAMES.recordDelete,
- ) as flowTypes.FlowRecordDelete[],
- recordLookups: getFlowNodes(
- NODE_NAMES.recordLookup,
- ) as flowTypes.FlowRecordLookup[],
- recordRollbacks: getFlowNodes(
- NODE_NAMES.recordRollback,
- ) as flowTypes.FlowRecordRollback[],
- recordUpdates: getFlowNodes(
- NODE_NAMES.recordUpdate,
- ) as flowTypes.FlowRecordUpdate[],
- screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
- steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
- subflows: getFlowNodes(NODE_NAMES.subflow) as flowTypes.FlowSubflow[],
- transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
- waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
- actionCalls: getFlowNodes(
- NODE_NAMES.actionCall,
- ) as flowTypes.FlowActionCall[],
- transitions: [
- {
- from: NODE_NAMES.start,
- to: NODE_NAMES.apexPluginCall,
- fault: false,
- },
- {
- from: NODE_NAMES.apexPluginCall,
- to: NODE_NAMES.assignment,
- fault: false,
- },
- {
- from: NODE_NAMES.assignment,
- to: NODE_NAMES.collectionProcessor,
- fault: false,
- },
- ],
- };
-}
-
-function getFlowNodes(name: string): flowTypes.FlowNode[] {
- return [{ name: `${name}`, label: `${name}` }] as flowTypes.FlowNode[];
-}
-
-function generateStage(
- name: string,
- stepNames: string[],
-): flowTypes.FlowOrchestratedStage {
- return {
- name: `${name}`,
- label: `${name}`,
- elementSubtype: "OrchestratedStage",
- locationX: 0,
- locationY: 0,
- description: `${name}`,
- stageSteps: stepNames.map((stepName) => ({
- name: `${stepName}`,
- label: `${stepName}`,
- elementSubtype: "Step",
- locationX: 0,
- locationY: 0,
- description: `${stepName}`,
- actionName: `${stepName}Action`,
- actionType: flowTypes.FlowStageStepActionType.STEP_BACKGROUND,
- })),
- } as flowTypes.FlowOrchestratedStage;
-}
+import { generateMockFlow } from "./utilities/mock_flow.ts";
Deno.test("PlantUmlGenerator", async (t) => {
- let systemUnderTest: PlantUmlGenerator;
- let mockedFlow: ParsedFlow;
+ const mockedFlow = generateMockFlow();
+ const systemUnderTest = new PlantUmlGenerator(mockedFlow);
let result: string;
- await t.step("Setup", () => {
- mockedFlow = generateMockFlow();
- systemUnderTest = new PlantUmlGenerator(mockedFlow);
- });
-
await t.step("should generate header", () => {
const label = "foo";
result = systemUnderTest.getHeader(label);
diff --git a/src/test/uml_generator_context_test.ts b/src/test/uml_generator_context_test.ts
index b225ea5..a311340 100644
--- a/src/test/uml_generator_context_test.ts
+++ b/src/test/uml_generator_context_test.ts
@@ -16,7 +16,7 @@
import { assertStringIncludes } from "@std/assert";
import { DiagramTool } from "../main/argument_processor.ts";
-import { ParsedFlow } from "../main/flow_parser.ts";
+import type { ParsedFlow } from "../main/flow_parser.ts";
import { UmlGeneratorContext } from "../main/uml_generator_context.ts";
const PLANT_UML_SIGNATURE = "skinparam State";
diff --git a/src/test/uml_generator_test.ts b/src/test/uml_generator_test.ts
index 926d85f..f6b5dd1 100644
--- a/src/test/uml_generator_test.ts
+++ b/src/test/uml_generator_test.ts
@@ -15,35 +15,14 @@
*/
import { assertEquals } from "@std/assert";
-import { ParsedFlow, Transition } from "../main/flow_parser.ts";
+import type { ParsedFlow, Transition } from "../main/flow_parser.ts";
import * as flowTypes from "../main/flow_types.ts";
-import { DiagramNode, UmlGenerator } from "../main/uml_generator.ts";
+import { type DiagramNode, UmlGenerator } from "../main/uml_generator.ts";
+import { generateMockFlow, NODE_NAMES } from "./utilities/mock_flow.ts";
+import { EOL } from "../main/constants.ts";
-const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
const TRANSITION_ARROW = "-->";
-const NODE_NAMES = {
- label: "test",
- start: "start",
- apexPluginCall: "myApexPluginCall",
- assignment: "myAssignment",
- collectionProcessor: "myCollectionProcessor",
- decision: "myDecision",
- loop: "myLoop",
- orchestratedStage: "myOrchestratedStage",
- recordCreate: "myRecordCreate",
- recordDelete: "myRecordDelete",
- recordLookup: "myRecordLookup",
- recordRollback: "myRecordRollback",
- recordUpdate: "myRecordUpdate",
- screen: "myScreen",
- step: "myStep",
- subflow: "mySubflow",
- transform: "myTransform",
- wait: "myWait",
- actionCall: "myActionCall",
-};
-
const UML_REPRESENTATIONS = {
flowStart: () =>
`state Flow Start FLOW_START
@@ -55,9 +34,19 @@ const UML_REPRESENTATIONS = {
var1 = Hello World
var2 AddItem Test Value`,
collectionProcessor: (name: string) => `state Collection Processor ${name}`,
- decision: (name: string) => `state Decision ${name}${EOL}`,
+ decision: (name: string) =>
+ `state Decision ${name}
+ Rule: myDecisionRule
+ 1. foo EqualTo true`,
loop: (name: string) => `state Loop ${name}`,
- orchestratedStage: (name: string) => `state Orchestrated Stage ${name}${EOL}`,
+ orchestratedStage: (name: string) =>
+ `state Orchestrated Stage ${name}
+ Step: 1. step1
+
+ Step: 2. step2
+
+ Step: 3. step3
+`,
recordCreate: (name: string) => `state Record Create ${name}`,
recordDelete: (name: string) => `state Record Delete ${name}`,
recordLookup: (name: string) =>
@@ -80,194 +69,45 @@ const UML_REPRESENTATIONS = {
transition: (from: string, to: string) => `${from} ${TRANSITION_ARROW} ${to}`,
};
-function generateMockFlow() {
- return {
- label: NODE_NAMES.label,
- start: {
- name: NODE_NAMES.start,
- } as flowTypes.FlowStart,
- apexPluginCalls: getFlowNodes(
- NODE_NAMES.apexPluginCall,
- ) as flowTypes.FlowApexPluginCall[],
- assignments: getFlowNodes(
- NODE_NAMES.assignment,
- ) as flowTypes.FlowAssignment[],
- collectionProcessors: getFlowNodes(
- NODE_NAMES.collectionProcessor,
- ) as flowTypes.FlowCollectionProcessor[],
- decisions: getFlowNodes(NODE_NAMES.decision) as flowTypes.FlowDecision[],
- loops: getFlowNodes(NODE_NAMES.loop) as flowTypes.FlowLoop[],
- orchestratedStages: getFlowNodes(
- NODE_NAMES.orchestratedStage,
- ) as flowTypes.FlowOrchestratedStage[],
- recordCreates: getFlowNodes(
- NODE_NAMES.recordCreate,
- ) as flowTypes.FlowRecordCreate[],
- recordDeletes: getFlowNodes(
- NODE_NAMES.recordDelete,
- ) as flowTypes.FlowRecordDelete[],
- recordLookups: getFlowNodes(
- NODE_NAMES.recordLookup,
- ) as flowTypes.FlowRecordLookup[],
- recordRollbacks: getFlowNodes(
- NODE_NAMES.recordRollback,
- ) as flowTypes.FlowRecordRollback[],
- recordUpdates: getFlowNodes(
- NODE_NAMES.recordUpdate,
- ) as flowTypes.FlowRecordUpdate[],
- screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
- steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
- subflows: getFlowNodes(NODE_NAMES.subflow) as flowTypes.FlowSubflow[],
- transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
- waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
- actionCalls: getFlowNodes(
- NODE_NAMES.actionCall,
- ) as flowTypes.FlowActionCall[],
- transitions: [
- {
- from: NODE_NAMES.start,
- to: NODE_NAMES.apexPluginCall,
- fault: false,
- },
- {
- from: NODE_NAMES.apexPluginCall,
- to: NODE_NAMES.assignment,
- fault: false,
- },
- {
- from: NODE_NAMES.assignment,
- to: NODE_NAMES.collectionProcessor,
- fault: false,
- },
- ],
- };
-}
-
-function getFlowNodes(name: string): flowTypes.FlowNode[] {
- const baseNode = {
- name: name,
- label: name,
- locationX: 0,
- locationY: 0,
- description: "",
- };
-
- // Add specific properties based on node name
- if (name === NODE_NAMES.recordUpdate) {
- return [
- {
- ...baseNode,
- object: "Account",
- inputAssignments: [],
- inputReference: "",
- elementSubtype: "RecordUpdate",
- filters: [],
- },
- ] as flowTypes.FlowRecordUpdate[];
+class ConcreteUmlGenerator extends UmlGenerator {
+ getHeader(label: string): string {
+ return label;
}
+ toUmlString(node: DiagramNode): string {
+ let result = `state ${node.type} ${node.id}`;
+ if (node.innerNodes) {
+ const innerContent = node.innerNodes
+ .map((innerNode) => {
+ const header = [innerNode.type, innerNode.label]
+ .filter(Boolean)
+ .join(": ");
+
+ const content = innerNode.content
+ .map((line) => ` ${line}`)
+ .join(EOL);
- if (name === NODE_NAMES.recordLookup) {
- return [
- {
- ...baseNode,
- object: "Account",
- elementSubtype: "RecordLookup",
- },
- ] as flowTypes.FlowRecordLookup[];
+ return header ? ` ${header}${EOL}${content}` : content;
+ })
+ .join(EOL);
+ result += EOL + innerContent;
+ }
+ return result;
}
-
- if (name === NODE_NAMES.recordCreate) {
- return [
- {
- ...baseNode,
- object: "Account",
- elementSubtype: "RecordCreate",
- },
- ] as flowTypes.FlowRecordCreate[];
+ getTransition(transition: Transition): string {
+ return UML_REPRESENTATIONS.transition(transition.from, transition.to);
}
-
- if (name === NODE_NAMES.recordDelete) {
- return [
- {
- ...baseNode,
- object: "Account",
- elementSubtype: "RecordDelete",
- },
- ] as flowTypes.FlowRecordDelete[];
+ getFooter(): string {
+ return "";
}
-
- if (name === NODE_NAMES.assignment) {
- return [
- {
- ...baseNode,
- elementSubtype: "Assignment",
- assignmentItems: [
- {
- assignToReference: "var1",
- operator: flowTypes.FlowAssignmentOperator.ASSIGN,
- value: {
- stringValue: "Hello World",
- },
- processMetadataValues: [],
- },
- {
- assignToReference: "var2",
- operator: flowTypes.FlowAssignmentOperator.ADD_ITEM,
- value: {
- stringValue: "Test Value",
- },
- processMetadataValues: [],
- },
- ],
- },
- ] as flowTypes.FlowAssignment[];
- }
-
- // Return basic node for other types
- return [baseNode] as flowTypes.FlowNode[];
}
Deno.test("UmlGenerator", async (t) => {
let systemUnderTest: UmlGenerator;
let mockParsedFlow: ParsedFlow;
- await t.step("setup", () => {
- mockParsedFlow = generateMockFlow();
-
- class ConcreteUmlGenerator extends UmlGenerator {
- getHeader(label: string): string {
- return label;
- }
- toUmlString(node: DiagramNode): string {
- let result = `state ${node.type} ${node.id}`;
- if (node.innerNodes) {
- const innerContent = node.innerNodes
- .map((innerNode) => {
- const header = [innerNode.type, innerNode.label]
- .filter(Boolean)
- .join(": ");
-
- const content = innerNode.content
- .map((line) => ` ${line}`)
- .join(EOL);
-
- return header ? ` ${header}${EOL}${content}` : content;
- })
- .join(EOL);
- result += EOL + innerContent;
- }
- return result;
- }
- getTransition(transition: Transition): string {
- return UML_REPRESENTATIONS.transition(transition.from, transition.to);
- }
- getFooter(): string {
- return "";
- }
- }
-
- systemUnderTest = new ConcreteUmlGenerator(mockParsedFlow);
- });
+ // Setup: initialize test data and system under test
+ mockParsedFlow = generateMockFlow();
+ systemUnderTest = new ConcreteUmlGenerator(mockParsedFlow);
await t.step("should generate UML with all flow elements", () => {
const uml = systemUnderTest.generateUml();
@@ -300,6 +140,10 @@ Deno.test("UmlGenerator", async (t) => {
NODE_NAMES.apexPluginCall,
NODE_NAMES.assignment,
),
+ UML_REPRESENTATIONS.transition(
+ NODE_NAMES.assignment,
+ NODE_NAMES.decision,
+ ),
UML_REPRESENTATIONS.transition(
NODE_NAMES.assignment,
NODE_NAMES.collectionProcessor,
@@ -896,241 +740,136 @@ Deno.test("UmlGenerator", async (t) => {
},
);
- await t.step(
- "should handle minimal start node configuration",
- () => {
- // Create a minimal mock flow with just the start node
- const minimalMockFlow: ParsedFlow = {
- label: "Minimal Test",
- processType: undefined,
- start: {
- name: "FLOW_START",
- label: "Flow Start",
- locationX: 0,
- locationY: 0,
- elementSubtype: "Start",
- description: "Minimal flow start",
- connector: { targetReference: "nextNode", isGoTo: false },
- },
- transitions: [],
- };
+ await t.step("should handle minimal start node configuration", () => {
+ // Create a minimal mock flow with just the start node
+ const minimalMockFlow: ParsedFlow = {
+ label: "Minimal Test",
+ processType: undefined,
+ start: {
+ name: "FLOW_START",
+ label: "Flow Start",
+ locationX: 0,
+ locationY: 0,
+ elementSubtype: "Start",
+ description: "Minimal flow start",
+ connector: { targetReference: "nextNode", isGoTo: false },
+ },
+ transitions: [],
+ };
- // Create a fresh generator with the minimal mock flow
- class ConcreteUmlGenerator extends UmlGenerator {
- getHeader(label: string): string {
- return label;
- }
- toUmlString(node: DiagramNode): string {
- let result = `state ${node.type} ${node.id}`;
- if (node.innerNodes) {
- const innerContent = node.innerNodes
- .map((innerNode) => {
- const header = [innerNode.type, innerNode.label]
- .filter(Boolean)
- .join(": ");
-
- const content = innerNode.content
- .map((line) => ` ${line}`)
- .join(EOL);
-
- return header ? ` ${header}${EOL}${content}` : content;
- })
- .join(EOL);
- result += EOL + innerContent;
- }
- return result;
- }
- getTransition(transition: Transition): string {
- return UML_REPRESENTATIONS.transition(transition.from, transition.to);
- }
- getFooter(): string {
- return "";
- }
- }
-
- const minimalGenerator = new ConcreteUmlGenerator(minimalMockFlow);
- const uml = minimalGenerator.generateUml();
+ // Create a fresh generator with the minimal mock flow
+ const minimalGenerator = new ConcreteUmlGenerator(minimalMockFlow);
+ const uml = minimalGenerator.generateUml();
- const expectedContent = [
- "Flow Start FLOW_START",
- "Flow Details",
- "No specific entry criteria defined",
- ];
+ const expectedContent = [
+ "Flow Start FLOW_START",
+ "Flow Details",
+ "No specific entry criteria defined",
+ ];
- expectedContent.forEach((content) => {
- assertEquals(
- uml.includes(content),
- true,
- `Expected UML: ${uml} to contain: ${content}`,
- );
- });
+ expectedContent.forEach((content) => {
+ assertEquals(
+ uml.includes(content),
+ true,
+ `Expected UML: ${uml} to contain: ${content}`,
+ );
+ });
- // Should not contain any specific trigger information
- const unexpectedContent = [
- "Process Type:",
- "Trigger Type:",
- "Filter Logic:",
- ];
+ // Should not contain any specific trigger information
+ const unexpectedContent = [
+ "Process Type:",
+ "Trigger Type:",
+ "Filter Logic:",
+ ];
- unexpectedContent.forEach((content) => {
- assertEquals(
- uml.includes(content),
- false,
- `Expected UML: ${uml} to NOT contain: ${content}`,
- );
- });
- },
- );
+ unexpectedContent.forEach((content) => {
+ assertEquals(
+ uml.includes(content),
+ false,
+ `Expected UML: ${uml} to NOT contain: ${content}`,
+ );
+ });
+ });
- await t.step(
- "should handle start node with record change criteria",
- () => {
- // Create a record change mock flow with just the start node
- const recordChangeMockFlow: ParsedFlow = {
- label: "Record Change Test",
- processType: flowTypes.FlowProcessType.FLOW,
- start: {
- name: "FLOW_START",
- label: "Flow Start",
- locationX: 0,
- locationY: 0,
- elementSubtype: "Start",
- description: "Record change flow start",
- connector: { targetReference: "nextNode", isGoTo: false },
- triggerType: flowTypes.FlowTriggerType.RECORD_BEFORE_SAVE,
- object: "Opportunity",
- recordTriggerType: flowTypes.RecordTriggerType.UPDATE,
- entryType: flowTypes.FlowEntryType.ALWAYS,
- doesRequireRecordChangedToMeetCriteria: true,
- filterLogic: "1 OR 2",
- filters: [
- {
- field: "StageName",
- operator: flowTypes.FlowRecordFilterOperator.EQUAL_TO,
- value: { stringValue: "Closed Won" },
- },
- {
- field: "Amount",
- operator: flowTypes.FlowRecordFilterOperator.GREATER_THAN,
- value: { numberValue: "100000" },
- },
- ],
- },
- transitions: [],
- };
+ await t.step("should handle start node with record change criteria", () => {
+ // Create a record change mock flow with just the start node
+ const recordChangeMockFlow: ParsedFlow = {
+ label: "Record Change Test",
+ processType: flowTypes.FlowProcessType.FLOW,
+ start: {
+ name: "FLOW_START",
+ label: "Flow Start",
+ locationX: 0,
+ locationY: 0,
+ elementSubtype: "Start",
+ description: "Record change flow start",
+ connector: { targetReference: "nextNode", isGoTo: false },
+ triggerType: flowTypes.FlowTriggerType.RECORD_BEFORE_SAVE,
+ object: "Opportunity",
+ recordTriggerType: flowTypes.RecordTriggerType.UPDATE,
+ entryType: flowTypes.FlowEntryType.ALWAYS,
+ doesRequireRecordChangedToMeetCriteria: true,
+ filterLogic: "1 OR 2",
+ filters: [
+ {
+ field: "StageName",
+ operator: flowTypes.FlowRecordFilterOperator.EQUAL_TO,
+ value: { stringValue: "Closed Won" },
+ },
+ {
+ field: "Amount",
+ operator: flowTypes.FlowRecordFilterOperator.GREATER_THAN,
+ value: { numberValue: "100000" },
+ },
+ ],
+ },
+ transitions: [],
+ };
- // Create a fresh generator with the record change mock flow
- class ConcreteUmlGenerator extends UmlGenerator {
- getHeader(label: string): string {
- return label;
- }
- toUmlString(node: DiagramNode): string {
- let result = `state ${node.type} ${node.id}`;
- if (node.innerNodes) {
- const innerContent = node.innerNodes
- .map((innerNode) => {
- const header = [innerNode.type, innerNode.label]
- .filter(Boolean)
- .join(": ");
-
- const content = innerNode.content
- .map((line) => ` ${line}`)
- .join(EOL);
-
- return header ? ` ${header}${EOL}${content}` : content;
- })
- .join(EOL);
- result += EOL + innerContent;
- }
- return result;
- }
- getTransition(transition: Transition): string {
- return UML_REPRESENTATIONS.transition(transition.from, transition.to);
- }
- getFooter(): string {
- return "";
- }
- }
-
- const recordChangeGenerator = new ConcreteUmlGenerator(
- recordChangeMockFlow,
+ // Create a fresh generator with the record change mock flow
+ const recordChangeGenerator = new ConcreteUmlGenerator(
+ recordChangeMockFlow,
+ );
+ const uml = recordChangeGenerator.generateUml();
+
+ const expectedContent = [
+ "Process Type: Flow",
+ "Trigger Type: RecordBeforeSave",
+ "Object: Opportunity",
+ "Record Trigger: Update",
+ "Entry Type: Always",
+ "Filter Logic: 1 OR 2",
+ "1. StageName EqualTo Closed Won",
+ "2. Amount GreaterThan 100000",
+ ];
+
+ expectedContent.forEach((content) => {
+ assertEquals(
+ uml.includes(content),
+ true,
+ `Expected UML: ${uml} to contain: ${content}`,
);
- const uml = recordChangeGenerator.generateUml();
+ });
+ });
- const expectedContent = [
- "Process Type: Flow",
- "Trigger Type: RecordBeforeSave",
- "Object: Opportunity",
- "Record Trigger: Update",
- "Entry Type: Always",
- "Filter Logic: 1 OR 2",
- "1. StageName EqualTo Closed Won",
- "2. Amount GreaterThan 100000",
- ];
+ await t.step("should handle empty start node gracefully", () => {
+ // Create an empty mock flow with no start node
+ const emptyMockFlow: ParsedFlow = {
+ label: "Empty Test",
+ processType: undefined,
+ start: undefined,
+ transitions: [],
+ };
- expectedContent.forEach((content) => {
- assertEquals(
- uml.includes(content),
- true,
- `Expected UML: ${uml} to contain: ${content}`,
- );
- });
- },
- );
-
- await t.step(
- "should handle empty start node gracefully",
- () => {
- // Create an empty mock flow with no start node
- const emptyMockFlow: ParsedFlow = {
- label: "Empty Test",
- processType: undefined,
- start: undefined,
- transitions: [],
- };
+ // Create a fresh generator with the empty mock flow
+ const emptyGenerator = new ConcreteUmlGenerator(emptyMockFlow);
+ const uml = emptyGenerator.generateUml();
- // Create a fresh generator with the empty mock flow
- class ConcreteUmlGenerator extends UmlGenerator {
- getHeader(label: string): string {
- return label;
- }
- toUmlString(node: DiagramNode): string {
- let result = `state ${node.type} ${node.id}`;
- if (node.innerNodes) {
- const innerContent = node.innerNodes
- .map((innerNode) => {
- const header = [innerNode.type, innerNode.label]
- .filter(Boolean)
- .join(": ");
-
- const content = innerNode.content
- .map((line) => ` ${line}`)
- .join(EOL);
-
- return header ? ` ${header}${EOL}${content}` : content;
- })
- .join(EOL);
- result += EOL + innerContent;
- }
- return result;
- }
- getTransition(transition: Transition): string {
- return UML_REPRESENTATIONS.transition(transition.from, transition.to);
- }
- getFooter(): string {
- return "";
- }
- }
-
- const emptyGenerator = new ConcreteUmlGenerator(emptyMockFlow);
- const uml = emptyGenerator.generateUml();
-
- // Should not contain flow start node when undefined
- assertEquals(
- uml.includes("Flow Start FLOW_START"),
- false,
- "Should not contain flow start node when undefined",
- );
- },
- );
+ // Should not contain flow start node when undefined
+ assertEquals(
+ uml.includes("Flow Start FLOW_START"),
+ false,
+ "Should not contain flow start node when undefined",
+ );
+ });
});
diff --git a/src/test/uml_writer_test.ts b/src/test/uml_writer_test.ts
index 5efaf78..15f476e 100644
--- a/src/test/uml_writer_test.ts
+++ b/src/test/uml_writer_test.ts
@@ -22,11 +22,12 @@ import {
Configuration,
DiagramTool,
Mode,
- RuntimeConfig,
+ type RuntimeConfig,
} from "../main/argument_processor.ts";
-import { FlowDifference } from "../main/flow_to_uml_transformer.ts";
+import type { FlowDifference } from "../main/flow_to_uml_transformer.ts";
import { UmlWriter } from "../main/uml_writer.ts";
-import { GithubClient, GithubComment } from "../main/github_client.ts";
+import type { GithubClient, GithubComment } from "../main/github_client.ts";
+import { EOL } from "../main/constants.ts";
const TEST_UNDECLARED_OUTPUTS_DIR = "./";
@@ -195,21 +196,15 @@ Deno.test("UmlWriter", async (t) => {
assertExists(existsSync(expectedFile1Path));
assertExists(existsSync(expectedFile2Path));
- // Use OS-appropriate newlines for test expectations
- const eol = Deno.build.os === "windows" ? "\r\n" : "\n";
-
// Check the content of the first file (no old version)
fileContent = Deno.readTextFileSync(expectedFile1Path);
- assertEquals(
- fileContent,
- `\`\`\`mermaid${eol}uml1${eol}\`\`\`${eol}`,
- );
+ assertEquals(fileContent, `\`\`\`mermaid${EOL}uml1${EOL}\`\`\`${EOL}`);
// Check the content of the second file (with old version)
fileContent = Deno.readTextFileSync(expectedFile2Path);
assertEquals(
fileContent,
- `## Old Version${eol}${eol}\`\`\`mermaid${eol}uml1${eol}\`\`\`${eol}${eol}## New Version${eol}${eol}\`\`\`mermaid${eol}uml2${eol}\`\`\`${eol}`,
+ `## Old Version${EOL}${EOL}\`\`\`mermaid${EOL}uml1${EOL}\`\`\`${EOL}${EOL}## New Version${EOL}${EOL}\`\`\`mermaid${EOL}uml2${EOL}\`\`\`${EOL}`,
);
// Clean up
diff --git a/src/test/test_utils.ts b/src/test/utilities/mock_config.ts
similarity index 93%
rename from src/test/test_utils.ts
rename to src/test/utilities/mock_config.ts
index 46df948..4220ce7 100644
--- a/src/test/test_utils.ts
+++ b/src/test/utilities/mock_config.ts
@@ -17,8 +17,8 @@
import {
DiagramTool,
Mode,
- RuntimeConfig,
-} from "../main/argument_processor.ts";
+ type RuntimeConfig,
+} from "../../main/argument_processor.ts";
/**
* The test configuration object that is used by the tests.
diff --git a/src/test/utilities/mock_flow.ts b/src/test/utilities/mock_flow.ts
new file mode 100644
index 0000000..6e2e62b
--- /dev/null
+++ b/src/test/utilities/mock_flow.ts
@@ -0,0 +1,272 @@
+/**
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import * as flowTypes from "../../main/flow_types.ts";
+import type { ParsedFlow } from "../../main/flow_parser.ts";
+
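+/**
+ * Names of the mock flow elements (and orchestrated-stage steps) that the
+ * UmlGenerator tests reference when asserting on generated UML.
+ */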
+export const NODE_NAMES = {
+ label: "test",
+ start: "FLOW_START",
+ apexPluginCall: "myApexPluginCall",
+ assignment: "myAssignment",
+ collectionProcessor: "myCollectionProcessor",
+ decision: "myDecision",
+ loop: "myLoop",
+ orchestratedStage: "myOrchestratedStage",
+ stageSteps: ["step1", "step2", "step3"],
+ recordCreate: "myRecordCreate",
+ recordDelete: "myRecordDelete",
+ recordLookup: "myRecordLookup",
+ recordRollback: "myRecordRollback",
+ recordUpdate: "myRecordUpdate",
+ screen: "myScreen",
+ step: "myStep",
+ subflow: "mySubflow",
+ transform: "myTransform",
+ wait: "myWait",
+ actionCall: "myActionCall",
+};
+
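+/**
+ * Builds a ParsedFlow containing one mock node of every supported element
+ * type plus a small set of transitions, including one fault path.
+ */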
+export function generateMockFlow(): ParsedFlow {
+ return {
+ label: NODE_NAMES.label,
+ start: {
+ name: NODE_NAMES.start,
+ } as flowTypes.FlowStart,
+ apexPluginCalls: getFlowNodes(
+ NODE_NAMES.apexPluginCall,
+ ) as flowTypes.FlowApexPluginCall[],
+ assignments: getFlowNodes(
+ NODE_NAMES.assignment,
+ ) as flowTypes.FlowAssignment[],
+ collectionProcessors: getFlowNodes(
+ NODE_NAMES.collectionProcessor,
+ ) as flowTypes.FlowCollectionProcessor[],
+ decisions: getFlowNodes(NODE_NAMES.decision) as flowTypes.FlowDecision[],
+ loops: getFlowNodes(NODE_NAMES.loop) as flowTypes.FlowLoop[],
+ orchestratedStages: getFlowNodes(
+ NODE_NAMES.orchestratedStage,
+ ) as flowTypes.FlowOrchestratedStage[],
+ recordCreates: getFlowNodes(
+ NODE_NAMES.recordCreate,
+ ) as flowTypes.FlowRecordCreate[],
+ recordDeletes: getFlowNodes(
+ NODE_NAMES.recordDelete,
+ ) as flowTypes.FlowRecordDelete[],
+ recordLookups: getFlowNodes(
+ NODE_NAMES.recordLookup,
+ ) as flowTypes.FlowRecordLookup[],
+ recordRollbacks: getFlowNodes(
+ NODE_NAMES.recordRollback,
+ ) as flowTypes.FlowRecordRollback[],
+ recordUpdates: getFlowNodes(
+ NODE_NAMES.recordUpdate,
+ ) as flowTypes.FlowRecordUpdate[],
+ screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
+ steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
+ subflows: getFlowNodes(NODE_NAMES.subflow) as flowTypes.FlowSubflow[],
+ transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
+ waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
+ actionCalls: getFlowNodes(
+ NODE_NAMES.actionCall,
+ ) as flowTypes.FlowActionCall[],
+ transitions: [
+ {
+ from: NODE_NAMES.start,
+ to: NODE_NAMES.apexPluginCall,
+ fault: false,
+ },
+ {
+ from: NODE_NAMES.apexPluginCall,
+ to: NODE_NAMES.assignment,
+ fault: false,
+ label: "Normal Transition",
+ },
+ {
+ from: NODE_NAMES.assignment,
+ to: NODE_NAMES.decision,
+ fault: true,
+ label: "Error Path",
+ },
+ {
+ from: NODE_NAMES.assignment,
+ to: NODE_NAMES.collectionProcessor,
+ fault: false,
+ },
+ ],
+ };
+}
+
+type NodeFactory = (name: string) => flowTypes.FlowNode[];
+
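+// Maps node names to factories that build more detailed mocks; names without
+// an entry fall back to createBasicNode via getFlowNodes.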
+const NODE_FACTORIES: Record<string, NodeFactory> = {
+ [NODE_NAMES.recordUpdate]: createRecordUpdateNode,
+ [NODE_NAMES.recordLookup]: createRecordLookupNode,
+ [NODE_NAMES.recordCreate]: createRecordCreateNode,
+ [NODE_NAMES.recordDelete]: createRecordDeleteNode,
+ [NODE_NAMES.assignment]: createAssignmentNode,
+ [NODE_NAMES.decision]: generateDecision,
+ [NODE_NAMES.orchestratedStage]: (name: string) =>
+ generateStage(name, NODE_NAMES.stageSteps),
+};
+
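+// Resolves a node name to its mock instances, defaulting to a basic node.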
+function getFlowNodes(name: string): flowTypes.FlowNode[] {
+ const factory = NODE_FACTORIES[name];
+ return factory ? factory(name) : createBasicNode(name);
+}
+
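+// Shared fields for every mock node: name, label, and default coordinates.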
+function createBaseNode(name: string) {
+ return {
+ name,
+ label: name,
+ locationX: 0,
+ locationY: 0,
+ description: "",
+ };
+}
+
+function createRecordUpdateNode(name: string): flowTypes.FlowRecordUpdate[] {
+ return [
+ {
+ ...createBaseNode(name),
+ object: "Account",
+ inputAssignments: [],
+ inputReference: "",
+ elementSubtype: "RecordUpdate",
+ filters: [],
+ },
+ ];
+}
+
+function createRecordLookupNode(name: string): flowTypes.FlowRecordLookup[] {
+ return [
+ {
+ ...createBaseNode(name),
+ object: "Account",
+ elementSubtype: "RecordLookup",
+ filters: [],
+ queriedFields: [],
+ },
+ ];
+}
+
+function createRecordCreateNode(name: string): flowTypes.FlowRecordCreate[] {
+ return [
+ {
+ ...createBaseNode(name),
+ object: "Account",
+ elementSubtype: "RecordCreate",
+ inputAssignments: [],
+ inputReference: "",
+ },
+ ];
+}
+
+function createRecordDeleteNode(name: string): flowTypes.FlowRecordDelete[] {
+ return [
+ {
+ ...createBaseNode(name),
+ object: "Account",
+ elementSubtype: "RecordDelete",
+ inputReference: "",
+ },
+ ];
+}
+
+function createAssignmentNode(name: string): flowTypes.FlowAssignment[] {
+ return [
+ {
+ ...createBaseNode(name),
+ elementSubtype: "Assignment",
+ assignmentItems: [
+ {
+ assignToReference: "var1",
+ operator: flowTypes.FlowAssignmentOperator.ASSIGN,
+ value: {
+ stringValue: "Hello World",
+ },
+ processMetadataValues: [],
+ },
+ {
+ assignToReference: "var2",
+ operator: flowTypes.FlowAssignmentOperator.ADD_ITEM,
+ value: {
+ stringValue: "Test Value",
+ },
+ processMetadataValues: [],
+ },
+ ],
+ },
+ ];
+}
+
+function createBasicNode(name: string): flowTypes.FlowNode[] {
+ return [
+ {
+ ...createBaseNode(name),
+ elementSubtype: "Unknown",
+ },
+ ];
+}
+
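+// Builds an orchestrated stage with a background-action step per step name.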
+function generateStage(
+ name: string,
+ stepNames: string[] = [],
+): flowTypes.FlowOrchestratedStage[] {
+ return [
+ {
+ ...createBaseNode(name),
+ elementSubtype: "OrchestratedStage",
+ stageSteps: stepNames.map((stepName) => ({
+ name: stepName,
+ label: stepName,
+ elementSubtype: "Step",
+ locationX: 0,
+ locationY: 0,
+ description: stepName,
+ actionName: `${stepName}Action`,
+ actionType: flowTypes.FlowStageStepActionType.STEP_BACKGROUND,
+ })),
+ },
+ ];
+}
+
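+// Builds a decision node with a single rule comparing "foo" to true.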
+function generateDecision(name: string): flowTypes.FlowDecision[] {
+ return [
+ {
+ ...createBaseNode(name),
+ elementSubtype: "Decision",
+ rules: [
+ {
+ name: `${name}Rule`,
+ label: `${name}Rule`,
+ description: `${name}Rule`,
+ conditionLogic: "and",
+ conditions: [
+ {
+ leftValueReference: "foo",
+ operator: flowTypes.FlowComparisonOperator.EQUAL_TO,
+ rightValue: {
+ booleanValue: "true",
+ },
+ processMetadataValues: [],
+ },
+ ],
+ },
+ ],
+ },
+ ];
+}