From e547412d784533cdaa7cb2a06d2314406b56e616 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Tue, 19 Aug 2025 12:57:01 +0100 Subject: [PATCH 01/43] test: update to latest version of jest --- package-lock.json | 2916 +++++++++++------ package.json | 7 +- src/commands/configure.spec.ts | 4 +- src/commands/content-item/archive.spec.ts | 48 +- src/commands/content-item/unarchive.spec.ts | 48 +- src/commands/content-repository/list.spec.ts | 4 +- src/commands/content-type-schema/list.spec.ts | 6 +- src/commands/content-type/import.spec.ts | 4 +- src/commands/content-type/list.spec.ts | 4 +- src/commands/content-type/register.spec.ts | 2 +- src/commands/event/archive.spec.ts | 16 +- src/commands/event/import.spec.ts | 2 +- .../hub/steps/event-clone-step.spec.ts | 18 +- .../hub/steps/extension-clone-step.spec.ts | 16 +- .../hub/steps/index-clone-step.spec.ts | 16 +- .../hub/steps/schema-clone-step.spec.ts | 2 +- .../hub/steps/type-clone-step.spec.ts | 18 +- src/common/file-log.spec.ts | 10 +- .../yargs/yargs-object-transformer.spec.ts | 4 +- src/services/export.service.spec.ts | 2 +- src/services/import.service.spec.ts | 4 +- 21 files changed, 1979 insertions(+), 1172 deletions(-) diff --git a/package-lock.json b/package-lock.json index ad9fae87..e0ba6c2a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -36,7 +36,8 @@ "@eslint/js": "^9.20.0", "@types/cli-progress": "^3.11.6", "@types/fs-extra": "^9.0.13", - "@types/jest": "^29.5.14", + "@types/graceful-fs": "^4.1.9", + "@types/jest": "^30.0.0", "@types/lodash": "^4.14.144", "@types/node": "^20.17.19", "@types/node-fetch": "^2.5.7", @@ -55,10 +56,10 @@ "eslint-plugin-prettier": "^5.2.3", "globals": "^15.15.0", "husky": "^3.0.5", - "jest": "^29.7.0", + "jest": "^30.0.5", "nock": "^12.0.3", "prettier": "^3.5.1", - "ts-jest": "^29.2.5", + "ts-jest": "^29.4.1", "ts-node": "^10.9.1", "typescript": "^5.7.3" }, @@ -107,9 +108,9 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.26.8", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", - "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.0.tgz", + "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==", "dev": true, "license": "MIT", "engines": { @@ -117,22 +118,22 @@ } }, "node_modules/@babel/core": { - "version": "7.26.9", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.9.tgz", - "integrity": "sha512-lWBYIrF7qK5+GjY5Uy+/hEgp8OJWOD/rpy74GplYRhEauvbHDeFB8t5hPOZxCZ0Oxf4Cc36tK51/l3ymJysrKw==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.3.tgz", + "integrity": "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==", "dev": true, "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.9", - "@babel/helper-compilation-targets": "^7.26.5", - "@babel/helper-module-transforms": "^7.26.0", - "@babel/helpers": "^7.26.9", - "@babel/parser": "^7.26.9", - "@babel/template": "^7.26.9", - "@babel/traverse": "^7.26.9", - "@babel/types": "^7.26.9", + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.28.3", + "@babel/helpers": "^7.28.3", + "@babel/parser": "^7.28.3", + 
"@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.3", + "@babel/types": "^7.28.2", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -158,16 +159,16 @@ } }, "node_modules/@babel/generator": { - "version": "7.27.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.5.tgz", - "integrity": "sha512-ZGhA37l0e/g2s1Cnzdix0O3aLYm66eF8aufiVteOgnwxgnRP8GoyMj7VWsgWnQbVKXyge7hqrFh2K2TQM6t1Hw==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/parser": "^7.27.5", - "@babel/types": "^7.27.3", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" }, "engines": { @@ -175,9 +176,9 @@ } }, "node_modules/@babel/generator/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.27", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.27.tgz", - "integrity": "sha512-VO95AxtSFMelbg3ouljAYnfvTEwSWVt/2YLf+U5Ejd8iT5mXE2Sa/1LGyvySMne2CGsepGLI7KpF3EzE3Aq9Mg==", + "version": "0.3.30", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", + "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", "dev": true, "license": "MIT", "dependencies": { @@ -186,14 +187,14 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", - "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.26.5", - "@babel/helper-validator-option": "^7.25.9", + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" @@ -212,30 +213,40 @@ "semver": "bin/semver.js" } }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-module-imports": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", - "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", "dev": true, "license": "MIT", "dependencies": { - "@babel/traverse": "^7.25.9", - 
"@babel/types": "^7.25.9" + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", - "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", + "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9", - "@babel/traverse": "^7.25.9" + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" }, "engines": { "node": ">=6.9.0" @@ -245,9 +256,9 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz", - "integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", "dev": true, "license": "MIT", "engines": { @@ -275,9 +286,9 @@ } }, "node_modules/@babel/helper-validator-option": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", - "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", "dev": true, "license": "MIT", "engines": { @@ -285,9 +296,9 @@ } }, "node_modules/@babel/helpers": { - "version": "7.28.2", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.2.tgz", - "integrity": "sha512-/V9771t+EgXz62aCcyofnQhGM8DQACbRhvzKFsXKC9QM+5MadF8ZmIm0crDMaz3+o0h0zXfJnd4EhbYbxsrcFw==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.3.tgz", + "integrity": "sha512-PTNtvUQihsAsDHMOP5pfobP8C6CM4JWXmP8DrEIt46c3r2bf87Ua1zoqevsMo9g+tWDwgWrFP5EIxuBx5RudAw==", "dev": true, "license": "MIT", "dependencies": { @@ -299,13 +310,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.27.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.7.tgz", - "integrity": "sha512-qnzXzDXdr/po3bOTbTIQZ7+TxNKxpkN5IifVLXS+r7qwynkZfPyjZfE7hCXbo7IoO9TNcSyibgONsf2HauUd3Q==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.3.tgz", + "integrity": "sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.27.7" + "@babel/types": "^7.28.2" }, "bin": { "parser": "bin/babel-parser.js" @@ -412,13 +423,13 @@ } }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.25.9", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", - "integrity": "sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" + "@babel/helper-plugin-utils": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -538,13 +549,13 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", - "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" + "@babel/helper-plugin-utils": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -569,34 +580,24 @@ } }, "node_modules/@babel/traverse": { - "version": "7.26.9", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.9.tgz", - "integrity": "sha512-ZYW7L+pL8ahU5fXmNbPF+iZFHCv5scFak7MZ9bwaRPLUhHh7QQEMjZUg0HevihoqCM5iSYHN61EyCoZvqC+bxg==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.3.tgz", + "integrity": "sha512-7w4kZYHneL3A6NP2nxzHvT3HCZ7puDZZjFMqDpBPECub79sTtSO5CGXDkKrTQq8ksAwfD/XI2MRFX23njdDaIQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.9", - "@babel/parser": "^7.26.9", - "@babel/template": "^7.26.9", - "@babel/types": "^7.26.9", - "debug": "^4.3.1", - "globals": "^11.1.0" + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.3", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.2", + "debug": "^4.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/types": { "version": "7.28.2", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.2.tgz", @@ -1242,6 +1243,40 @@ "node": ">=12" } }, + "node_modules/@emnapi/core": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.4.5.tgz", + "integrity": "sha512-XsLw1dEOpkSX/WucdqUhPWP7hDxSvZiY+fsUC14h+FtQ2Ifni4znbBt8punRX+Uj2JG/uDb8nEHVKvrVlvdZ5Q==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.0.4", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.5.tgz", + "integrity": "sha512-++LApOtY0pEEz1zrd9vy1/zXVaVJJ/EbAF3u0fXIzPJEDtnITsBGbbK0EkM72amhl/R5b+5xx0Y/QhcVOpuulg==", + "dev": true, + "license": "MIT", + "optional": 
true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.0.4.tgz", + "integrity": "sha512-PJR+bOmMOPH8AtcTGAyYNiuJ3/Fcoj2XN/gBEWzDIKh254XO+mM9XoXHk5GNEhodxeMznbg7BlRojVbKN+gC6g==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", @@ -1469,6 +1504,109 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.0.tgz", + "integrity": "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/@isaacs/fs-minipass": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", @@ -1570,21 +1708,21 @@ } }, "node_modules/@jest/console": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", - "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-30.0.5.tgz", + "integrity": "sha512-xY6b0XiL0Nav3ReresUarwl2oIz1gTnxGbGpho9/rbUWsLH0f1OD/VT84xs8c7VmH7MChnLb0pag6PhZhAdDiA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", + "@jest/types": "30.0.5", "@types/node": "*", - "chalk": "^4.0.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", + "chalk": "^4.1.2", + "jest-message-util": "30.0.5", + "jest-util": "30.0.5", "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/console/node_modules/ansi-styles": { @@ -1664,43 +1802,43 @@ } }, "node_modules/@jest/core": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", - "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-30.0.5.tgz", + "integrity": "sha512-fKD0OulvRsXF1hmaFgHhVJzczWzA1RXMMo9LTPuFXo9q/alDbME3JIyWYqovWsUBWSoBcsHaGPSLF9rz4l9Qeg==", "dev": true, "license": "MIT", "dependencies": { - "@jest/console": "^29.7.0", - "@jest/reporters": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/console": "30.0.5", + "@jest/pattern": "30.0.1", + "@jest/reporters": "30.0.5", + "@jest/test-result": "30.0.5", + "@jest/transform": "30.0.5", + "@jest/types": "30.0.5", "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-changed-files": "^29.7.0", - "jest-config": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-resolve-dependencies": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "jest-watcher": "^29.7.0", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "strip-ansi": "^6.0.0" + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-changed-files": "30.0.5", + "jest-config": "30.0.5", + "jest-haste-map": "30.0.5", + "jest-message-util": "30.0.5", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.0.5", + "jest-resolve-dependencies": "30.0.5", + "jest-runner": "30.0.5", + "jest-runtime": "30.0.5", + "jest-snapshot": "30.0.5", + "jest-util": "30.0.5", + "jest-validate": "30.0.5", + 
"jest-watcher": "30.0.5", + "micromatch": "^4.0.8", + "pretty-format": "30.0.5", + "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -1745,9 +1883,9 @@ } }, "node_modules/@jest/core/node_modules/ci-info": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", - "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.0.tgz", + "integrity": "sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==", "dev": true, "funding": [ { @@ -1790,19 +1928,6 @@ "node": ">=8" } }, - "node_modules/@jest/core/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -1816,117 +1941,150 @@ "node": ">=8" } }, + "node_modules/@jest/diff-sequences": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/diff-sequences/-/diff-sequences-30.0.1.tgz", + "integrity": "sha512-n5H8QLDJ47QqbCNn5SuFjCRDrOLEZ0h8vAHCK5RL9Ls7Xa8AQLa/YxAc9UjFqoEDM48muwtBGjtMY5cr0PLDCw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, "node_modules/@jest/environment": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", - "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-30.0.5.tgz", + "integrity": "sha512-aRX7WoaWx1oaOkDQvCWImVQ8XNtdv5sEWgk4gxR6NXb7WBUnL5sRak4WRzIQRZ1VTWPvV4VI4mgGjNL9TeKMYA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/fake-timers": "30.0.5", + "@jest/types": "30.0.5", "@types/node": "*", - "jest-mock": "^29.7.0" + "jest-mock": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-30.0.5.tgz", + "integrity": "sha512-6udac8KKrtTtC+AXZ2iUN/R7dp7Ydry+Fo6FPFnDG54wjVMnb6vW/XNlf7Xj8UDjAE3aAVAsR4KFyKk3TCXmTA==", "dev": true, "license": "MIT", "dependencies": { - "expect": "^29.7.0", - "jest-snapshot": "^29.7.0" + "expect": "30.0.5", + "jest-snapshot": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/expect-utils": { - "version": "29.7.0", - "resolved": 
"https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", - "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-30.0.5.tgz", + "integrity": "sha512-F3lmTT7CXWYywoVUGTCmom0vXq3HTTkaZyTAzIy+bXSBizB7o5qzlC9VCtq0arOa8GqmNsbg/cE9C6HLn7Szew==", "dev": true, "license": "MIT", "dependencies": { - "jest-get-type": "^29.6.3" + "@jest/get-type": "30.0.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/fake-timers": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", - "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-30.0.5.tgz", + "integrity": "sha512-ZO5DHfNV+kgEAeP3gK3XlpJLL4U3Sz6ebl/n68Uwt64qFFs5bv4bfEEjyRGK5uM0C90ewooNgFuKMdkbEoMEXw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "@sinonjs/fake-timers": "^10.0.2", + "@jest/types": "30.0.5", + "@sinonjs/fake-timers": "^13.0.0", "@types/node": "*", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" + "jest-message-util": "30.0.5", + "jest-mock": "30.0.5", + "jest-util": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/get-type": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/get-type/-/get-type-30.0.1.tgz", + "integrity": "sha512-AyYdemXCptSRFirI5EPazNxyPwAL0jXt3zceFjaj8NFiKP9pOi0bfXonf6qkf82z2t3QWPeLCWWw4stPBzctLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/globals": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", - "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-30.0.5.tgz", + "integrity": "sha512-7oEJT19WW4oe6HR7oLRvHxwlJk2gev0U9px3ufs8sX9PoD1Eza68KF0/tlN7X0dq/WVsBScXQGgCldA1V9Y/jA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/types": "^29.6.3", - "jest-mock": "^29.7.0" + "@jest/environment": "30.0.5", + "@jest/expect": "30.0.5", + "@jest/types": "30.0.5", + "jest-mock": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/pattern": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/pattern/-/pattern-30.0.1.tgz", + "integrity": "sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-regex-util": "30.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/reporters": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", - "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "version": 
"30.0.5", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-30.0.5.tgz", + "integrity": "sha512-mafft7VBX4jzED1FwGC1o/9QUM2xebzavImZMeqnsklgcyxBto8mV4HzNSzUrryJ+8R9MFOM3HgYuDradWR+4g==", "dev": true, "license": "MIT", "dependencies": { "@bcoe/v8-coverage": "^0.2.3", - "@jest/console": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", + "@jest/console": "30.0.5", + "@jest/test-result": "30.0.5", + "@jest/transform": "30.0.5", + "@jest/types": "30.0.5", + "@jridgewell/trace-mapping": "^0.3.25", "@types/node": "*", - "chalk": "^4.0.0", - "collect-v8-coverage": "^1.0.0", - "exit": "^0.1.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", + "chalk": "^4.1.2", + "collect-v8-coverage": "^1.0.2", + "exit-x": "^0.2.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-instrument": "^6.0.0", "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.0", + "istanbul-lib-source-maps": "^5.0.0", "istanbul-reports": "^3.1.3", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", + "jest-message-util": "30.0.5", + "jest-util": "30.0.5", + "jest-worker": "30.0.5", "slash": "^3.0.0", - "string-length": "^4.0.1", - "strip-ansi": "^6.0.0", + "string-length": "^4.0.2", "v8-to-istanbul": "^9.0.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -1938,9 +2096,9 @@ } }, "node_modules/@jest/reporters/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "version": "0.3.30", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", + "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", "dev": true, "license": "MIT", "dependencies": { @@ -1964,6 +2122,16 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/@jest/reporters/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, "node_modules/@jest/reporters/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -2001,6 +2169,27 @@ "dev": true, "license": "MIT" }, + "node_modules/@jest/reporters/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@jest/reporters/node_modules/has-flag": { "version": "4.0.0", 
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -2011,17 +2200,20 @@ "node": ">=8" } }, - "node_modules/@jest/reporters/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "node_modules/@jest/reporters/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, - "license": "MIT", + "license": "ISC", "dependencies": { - "ansi-regex": "^5.0.1" + "brace-expansion": "^2.0.1" }, "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/@jest/reporters/node_modules/supports-color": { @@ -2038,37 +2230,129 @@ } }, "node_modules/@jest/schemas": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", - "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/snapshot-utils": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/snapshot-utils/-/snapshot-utils-30.0.5.tgz", + "integrity": "sha512-XcCQ5qWHLvi29UUrowgDFvV4t7ETxX91CbDczMnoqXPOIcZOxyNdSjm6kV5XMc8+HkxfRegU/MUmnTbJRzGrUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.0.5", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "natural-compare": "^1.4.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + 
} + }, + "node_modules/@jest/snapshot-utils/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/snapshot-utils/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "license": "MIT", "dependencies": { - "@sinclair/typebox": "^0.27.8" + "has-flag": "^4.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": ">=8" } }, "node_modules/@jest/source-map": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", - "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-30.0.1.tgz", + "integrity": "sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/trace-mapping": "^0.3.18", - "callsites": "^3.0.0", - "graceful-fs": "^4.2.9" + "@jridgewell/trace-mapping": "^0.3.25", + "callsites": "^3.1.0", + "graceful-fs": "^4.2.11" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/source-map/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "version": "0.3.30", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", + "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", "dev": true, "license": "MIT", "dependencies": { @@ -2077,62 +2361,62 @@ } }, "node_modules/@jest/test-result": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", - "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-30.0.5.tgz", + "integrity": "sha512-wPyztnK0gbDMQAJZ43tdMro+qblDHH1Ru/ylzUo21TBKqt88ZqnKKK2m30LKmLLoKtR2lxdpCC/P3g1vfKcawQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/console": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "collect-v8-coverage": "^1.0.0" + "@jest/console": "30.0.5", + "@jest/types": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "collect-v8-coverage": "^1.0.2" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, 
"node_modules/@jest/test-sequencer": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", - "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-30.0.5.tgz", + "integrity": "sha512-Aea/G1egWoIIozmDD7PBXUOxkekXl7ueGzrsGGi1SbeKgQqCYCIf+wfbflEbf2LiPxL8j2JZGLyrzZagjvW4YQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/test-result": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", + "@jest/test-result": "30.0.5", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.0.5", "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/transform": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", - "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.0.5.tgz", + "integrity": "sha512-Vk8amLQCmuZyy6GbBht1Jfo9RSdBtg7Lks+B0PecnjI8J+PCLQPGh7uI8Q/2wwpW2gLdiAfiHNsmekKlywULqg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/core": "^7.11.6", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "babel-plugin-istanbul": "^6.1.1", - "chalk": "^4.0.0", + "@babel/core": "^7.27.4", + "@jest/types": "30.0.5", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.0", + "chalk": "^4.1.2", "convert-source-map": "^2.0.0", "fast-json-stable-stringify": "^2.1.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "micromatch": "^4.0.4", - "pirates": "^4.0.4", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.0.5", + "jest-regex-util": "30.0.1", + "jest-util": "30.0.5", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", "slash": "^3.0.0", - "write-file-atomic": "^4.0.2" + "write-file-atomic": "^5.0.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/transform/node_modules/@jridgewell/trace-mapping": { @@ -2223,21 +2507,22 @@ } }, "node_modules/@jest/types": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", - "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.0.5.tgz", + "integrity": "sha512-aREYa3aku9SSnea4aX6bhKn4bgv3AXkgijoQgbYV3yvbiGt6z+MQ85+6mIhx9DsKW2BuB/cLR/A+tcMThx+KLQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/schemas": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", "@types/node": "*", - "@types/yargs": "^17.0.8", - "chalk": "^4.0.0" + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@jest/types/node_modules/@types/yargs": { @@ -2327,18 +2612,14 @@ } }, "node_modules/@jridgewell/gen-mapping": { - "version": 
"0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" } }, "node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping": { @@ -2361,21 +2642,12 @@ "node": ">=6.0.0" } }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "dev": true + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.9", @@ -2387,6 +2659,19 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -2422,6 +2707,17 @@ "node": ">= 8" } }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, "node_modules/@pkgr/core": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", @@ -2436,9 +2732,9 @@ } }, "node_modules/@sinclair/typebox": { - "version": "0.27.8", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", - "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "version": "0.34.40", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.40.tgz", + "integrity": "sha512-gwBNIP8ZAYev/ORDWW0QvxdwPXwxBtLsdsJgSc7eDIRt8ubP+rxUBzPsrwnu16fgEF8Bx4lh/+mvQvJzcTM6Kw==", "dev": true, "license": "MIT" }, @@ -2453,13 +2749,13 @@ } }, 
"node_modules/@sinonjs/fake-timers": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", - "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "version": "13.0.5", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-13.0.5.tgz", + "integrity": "sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==", "dev": true, "license": "BSD-3-Clause", "dependencies": { - "@sinonjs/commons": "^3.0.0" + "@sinonjs/commons": "^3.0.1" } }, "node_modules/@tsconfig/node10": { @@ -2486,6 +2782,17 @@ "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", "dev": true }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.0.tgz", + "integrity": "sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@types/babel__core": { "version": "7.20.5", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", @@ -2501,9 +2808,9 @@ } }, "node_modules/@types/babel__generator": { - "version": "7.6.8", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", - "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", "dev": true, "license": "MIT", "dependencies": { @@ -2522,13 +2829,13 @@ } }, "node_modules/@types/babel__traverse": { - "version": "7.20.6", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", - "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.20.7" + "@babel/types": "^7.28.2" } }, "node_modules/@types/cli-progress": { @@ -2622,14 +2929,14 @@ } }, "node_modules/@types/jest": { - "version": "29.5.14", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", - "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "version": "30.0.0", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-30.0.0.tgz", + "integrity": "sha512-XTYugzhuwqWjws0CVz8QpM36+T+Dz5mTEBKhNs/esGLnCIlGdRy+Dq78NRjd7ls7r8BC8ZRMOrKlkO1hU0JOwA==", "dev": true, "license": "MIT", "dependencies": { - "expect": "^29.0.0", - "pretty-format": "^29.0.0" + "expect": "^30.0.0", + "pretty-format": "^30.0.0" } }, "node_modules/@types/json-schema": { @@ -2929,18 +3236,294 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - 
"integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/@unrs/resolver-binding-android-arm-eabi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", + "integrity": "sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-android-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm64/-/resolver-binding-android-arm64-1.11.1.tgz", + "integrity": "sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-arm64/-/resolver-binding-darwin-arm64-1.11.1.tgz", + "integrity": "sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-x64/-/resolver-binding-darwin-x64-1.11.1.tgz", + "integrity": "sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-freebsd-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-freebsd-x64/-/resolver-binding-freebsd-x64-1.11.1.tgz", + "integrity": "sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-gnueabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-gnueabihf/-/resolver-binding-linux-arm-gnueabihf-1.11.1.tgz", + "integrity": "sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@unrs/resolver-binding-linux-arm-musleabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-musleabihf/-/resolver-binding-linux-arm-musleabihf-1.11.1.tgz", + "integrity": "sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-gnu/-/resolver-binding-linux-arm64-gnu-1.11.1.tgz", + "integrity": "sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-musl/-/resolver-binding-linux-arm64-musl-1.11.1.tgz", + "integrity": "sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-ppc64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-ppc64-gnu/-/resolver-binding-linux-ppc64-gnu-1.11.1.tgz", + "integrity": "sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-gnu/-/resolver-binding-linux-riscv64-gnu-1.11.1.tgz", + "integrity": "sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-musl/-/resolver-binding-linux-riscv64-musl-1.11.1.tgz", + "integrity": "sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-s390x-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-s390x-gnu/-/resolver-binding-linux-s390x-gnu-1.11.1.tgz", + "integrity": "sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-gnu/-/resolver-binding-linux-x64-gnu-1.11.1.tgz", + "integrity": "sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@unrs/resolver-binding-linux-x64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-musl/-/resolver-binding-linux-x64-musl-1.11.1.tgz", + "integrity": "sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-wasm32-wasi/-/resolver-binding-wasm32-wasi-1.11.1.tgz", + "integrity": "sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.11" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/resolver-binding-win32-arm64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-arm64-msvc/-/resolver-binding-win32-arm64-msvc-1.11.1.tgz", + "integrity": "sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-ia32-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-ia32-msvc/-/resolver-binding-win32-ia32-msvc-1.11.1.tgz", + "integrity": "sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-x64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-x64-msvc/-/resolver-binding-win32-x64-msvc-1.11.1.tgz", + "integrity": "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==", + "cpu": [ + "x64" + ], "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] }, "node_modules/@yao-pkg/pkg": { "version": "6.5.1", @@ -3265,13 +3848,6 @@ "node": ">=4" } }, - "node_modules/async": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", - "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", - "dev": true, - "license": "MIT" - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -3362,25 +3938,25 @@ } }, "node_modules/babel-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", - "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.0.5.tgz", + "integrity": "sha512-mRijnKimhGDMsizTvBTWotwNpzrkHr+VvZUQBof2AufXKB8NXrL1W69TG20EvOz7aevx6FTJIaBuBkYxS8zolg==", "dev": true, "license": "MIT", "dependencies": { - "@jest/transform": "^29.7.0", - "@types/babel__core": "^7.1.14", - "babel-plugin-istanbul": "^6.1.1", - "babel-preset-jest": "^29.6.3", - "chalk": 
"^4.0.0", - "graceful-fs": "^4.2.9", + "@jest/transform": "30.0.5", + "@types/babel__core": "^7.20.5", + "babel-plugin-istanbul": "^7.0.0", + "babel-preset-jest": "30.0.1", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", "slash": "^3.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { - "@babel/core": "^7.8.0" + "@babel/core": "^7.11.0" } }, "node_modules/babel-jest/node_modules/ansi-styles": { @@ -3460,63 +4036,35 @@ } }, "node_modules/babel-plugin-istanbul": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", - "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.0.tgz", + "integrity": "sha512-C5OzENSx/A+gt7t4VH1I2XsflxyPUmXRFPKBxt33xncdOmq7oROVM3bZv9Ysjjkv8OJYDMa+tKuKMvqU/H3xdw==", "dev": true, "license": "BSD-3-Clause", "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@istanbuljs/load-nyc-config": "^1.0.0", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-instrument": "^5.0.4", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", "test-exclude": "^6.0.0" }, "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", - "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.12.3", - "@babel/parser": "^7.14.7", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-istanbul/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" + "node": ">=12" } }, "node_modules/babel-plugin-jest-hoist": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", - "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.0.1.tgz", + "integrity": "sha512-zTPME3pI50NsFW8ZBaVIOeAxzEY7XHlmWeXXu9srI+9kNfzCUTy8MFan46xOGZY8NZThMqq+e3qZUKsvXbasnQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.3.3", - "@babel/types": "^7.3.3", - "@types/babel__core": "^7.1.14", - "@types/babel__traverse": "^7.0.6" + "@babel/template": "^7.27.2", + "@babel/types": "^7.27.3", + "@types/babel__core": "^7.20.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/babel-preset-current-node-syntax": { @@ -3547,20 +4095,20 @@ } }, "node_modules/babel-preset-jest": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", - "integrity": 
"sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.0.1.tgz", + "integrity": "sha512-+YHejD5iTWI46cZmcc/YtX4gaKBtdqCHCVfuVinizVpbmyjO3zYmeuyFdfA8duRqQZfgCAMlsfmkVbJ+e2MAJw==", "dev": true, "license": "MIT", "dependencies": { - "babel-plugin-jest-hoist": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0" + "babel-plugin-jest-hoist": "30.0.1", + "babel-preset-current-node-syntax": "^1.1.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { - "@babel/core": "^7.0.0" + "@babel/core": "^7.11.0" } }, "node_modules/balanced-match": { @@ -3630,9 +4178,9 @@ } }, "node_modules/browserslist": { - "version": "4.24.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", - "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "version": "4.25.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.3.tgz", + "integrity": "sha512-cDGv1kkDI4/0e5yON9yM5G/0A5u8sf5TnmdX5C9qHzI9PPu++sQ9zjm1k9NiOrf3riY4OkK0zSGqfvJyJsgCBQ==", "dev": true, "funding": [ { @@ -3650,10 +4198,10 @@ ], "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001688", - "electron-to-chromium": "^1.5.73", + "caniuse-lite": "^1.0.30001735", + "electron-to-chromium": "^1.5.204", "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.1" + "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" @@ -3789,9 +4337,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001700", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001700.tgz", - "integrity": "sha512-2S6XIXwaE7K7erT8dY+kLQcpa5ms63XlRkMkReXjle+kf6c5g38vyMl+Z5y8dSxOFDhcFe+nxnn261PLxBSQsQ==", + "version": "1.0.30001735", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001735.tgz", + "integrity": "sha512-EV/laoX7Wq2J9TQlyIXRxTJqIw4sxfXS4OYgudGxBYRuTv0q7AM6yMEpU/Vo1I94thg9U6EZ2NfZx9GJq83u7w==", "dev": true, "funding": [ { @@ -3855,9 +4403,9 @@ "dev": true }, "node_modules/cjs-module-lexer": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", - "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-2.1.0.tgz", + "integrity": "sha512-UX0OwmYRYQQetfrLEZeewIFFI+wSTofC+pMBLNuH3RUuu/xzG1oz84UCEDOSoQlN3fZ4+AzmV50ZYvGqkMh9yA==", "dev": true, "license": "MIT" }, @@ -4330,104 +4878,6 @@ "node": ">=4" } }, - "node_modules/create-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", - "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "prompts": "^2.0.1" - }, - "bin": { - "create-jest": "bin/create-jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/create-jest/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/create-jest/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/create-jest/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/create-jest/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, - "license": "MIT" - }, - "node_modules/create-jest/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/create-jest/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/create-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", @@ -4652,16 +5102,6 @@ "node": ">=0.3.1" } }, - "node_modules/diff-sequences": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", - "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, "node_modules/dot-prop": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", @@ -4715,26 +5155,17 @@ "util-deprecate": "~1.0.1" } }, - "node_modules/ejs": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", - "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", "dev": true, - 
"license": "Apache-2.0", - "dependencies": { - "jake": "^10.8.5" - }, - "bin": { - "ejs": "bin/cli.js" - }, - "engines": { - "node": ">=0.10.0" - } + "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.5.102", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.102.tgz", - "integrity": "sha512-eHhqaja8tE/FNpIiBrvBjFV/SSKpyWHLvxuR9dPTdo+3V9ppdLmFB7ZZQ98qNovcngPLYIz0oOBF9P0FfZef5Q==", + "version": "1.5.207", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.207.tgz", + "integrity": "sha512-mryFrrL/GXDTmAtIVMVf+eIXM09BBPlO5IQ7lUyKmK8d+A4VpRGG+M3ofoVef6qyF8s60rJei8ymlJxjUA8Faw==", "dev": true, "license": "ISC" }, @@ -5367,11 +5798,12 @@ "node": ">=6" } }, - "node_modules/exit": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", - "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "node_modules/exit-x": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/exit-x/-/exit-x-0.2.2.tgz", + "integrity": "sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8.0" } @@ -5399,20 +5831,21 @@ } }, "node_modules/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/expect/-/expect-30.0.5.tgz", + "integrity": "sha512-P0te2pt+hHI5qLJkIR+iMvS+lYUZml8rKKsohVHAGY+uClp9XVbdyYNJOIjSRpHVp8s8YqxJCiHUkSYZGr8rtQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/expect-utils": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0" + "@jest/expect-utils": "30.0.5", + "@jest/get-type": "30.0.1", + "jest-matcher-utils": "30.0.5", + "jest-message-util": "30.0.5", + "jest-mock": "30.0.5", + "jest-util": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/external-editor": { @@ -5519,39 +5952,6 @@ "node": ">=16.0.0" } }, - "node_modules/filelist": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", - "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "minimatch": "^5.0.1" - } - }, - "node_modules/filelist/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/filelist/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/fill-range": { "version": "7.1.1", "resolved": 
"https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", @@ -5618,34 +6018,128 @@ "keyv": "^4.5.4" }, "engines": { - "node": ">=16" + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/follow-redirects": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/foreground-child/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/foreground-child/node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/foreground-child/node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" } }, - "node_modules/flatted": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", "dev": true, - "license": "ISC" - }, - 
"node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", + "license": "ISC", "engines": { - "node": ">=4.0" + "node": ">=14" }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, "node_modules/form-data": { @@ -5979,6 +6473,28 @@ "dev": true, "license": "MIT" }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, "node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -6490,146 +7006,78 @@ } }, "node_modules/istanbul-lib-source-maps": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", - "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", "dev": true, "license": "BSD-3-Clause", "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0", - "source-map": "^0.6.1" + "istanbul-lib-coverage": "^3.0.0" }, "engines": { "node": ">=10" } }, - "node_modules/istanbul-reports": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", - "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "html-escaper": "^2.0.0", - "istanbul-lib-report": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jake": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", - "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", + "node_modules/istanbul-lib-source-maps/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.30", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", + "integrity": 
"sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", "dev": true, - "license": "Apache-2.0", + "license": "MIT", "dependencies": { - "async": "^3.2.3", - "chalk": "^4.0.2", - "filelist": "^1.0.4", - "minimatch": "^3.1.2" - }, - "bin": { - "jake": "bin/cli.js" - }, - "engines": { - "node": ">=10" + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/jake/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "node_modules/istanbul-reports": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", "dev": true, - "license": "MIT", + "license": "BSD-3-Clause", "dependencies": { - "color-convert": "^2.0.1" + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" }, "engines": { "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jake/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", "dev": true, - "license": "MIT", + "license": "BlueOak-1.0.0", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" + "@isaacs/cliui": "^8.0.2" }, "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/jake/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/jake/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, - "license": "MIT" - }, - "node_modules/jake/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/jake/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" + "url": "https://github.com/sponsors/isaacs" }, - "engines": { - "node": ">=8" + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" } }, 
"node_modules/jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", - "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest/-/jest-30.0.5.tgz", + "integrity": "sha512-y2mfcJywuTUkvLm2Lp1/pFX8kTgMO5yyQGq/Sk/n2mN7XWYp4JsCZ/QXW34M8YScgk8bPZlREH04f6blPnoHnQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/core": "^29.7.0", - "@jest/types": "^29.6.3", - "import-local": "^3.0.2", - "jest-cli": "^29.7.0" + "@jest/core": "30.0.5", + "@jest/types": "30.0.5", + "import-local": "^3.2.0", + "jest-cli": "30.0.5" }, "bin": { "jest": "bin/jest.js" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -6641,18 +7089,18 @@ } }, "node_modules/jest-changed-files": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", - "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-30.0.5.tgz", + "integrity": "sha512-bGl2Ntdx0eAwXuGpdLdVYVr5YQHnSZlQ0y9HVDu565lCUAe9sj6JOtBbMmBBikGIegne9piDDIOeiLVoqTkz4A==", "dev": true, "license": "MIT", "dependencies": { - "execa": "^5.0.0", - "jest-util": "^29.7.0", + "execa": "^5.1.1", + "jest-util": "30.0.5", "p-limit": "^3.1.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-changed-files/node_modules/cross-spawn": { @@ -6812,35 +7260,35 @@ } }, "node_modules/jest-circus": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", - "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-30.0.5.tgz", + "integrity": "sha512-h/sjXEs4GS+NFFfqBDYT7y5Msfxh04EwWLhQi0F8kuWpe+J/7tICSlswU8qvBqumR3kFgHbfu7vU6qruWWBPug==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/environment": "30.0.5", + "@jest/expect": "30.0.5", + "@jest/test-result": "30.0.5", + "@jest/types": "30.0.5", "@types/node": "*", - "chalk": "^4.0.0", + "chalk": "^4.1.2", "co": "^4.6.0", - "dedent": "^1.0.0", - "is-generator-fn": "^2.0.0", - "jest-each": "^29.7.0", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", + "dedent": "^1.6.0", + "is-generator-fn": "^2.1.0", + "jest-each": "30.0.5", + "jest-matcher-utils": "30.0.5", + "jest-message-util": "30.0.5", + "jest-runtime": "30.0.5", + "jest-snapshot": "30.0.5", + "jest-util": "30.0.5", "p-limit": "^3.1.0", - "pretty-format": "^29.7.0", - "pure-rand": "^6.0.0", + "pretty-format": "30.0.5", + "pure-rand": "^7.0.0", "slash": "^3.0.0", - "stack-utils": "^2.0.3" + "stack-utils": "^2.0.6" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-circus/node_modules/ansi-styles": { @@ -6897,9 +7345,9 @@ "license": "MIT" }, 
"node_modules/jest-circus/node_modules/dedent": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", - "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz", + "integrity": "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==", "dev": true, "license": "MIT", "peerDependencies": { @@ -6964,29 +7412,28 @@ } }, "node_modules/jest-cli": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", - "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-30.0.5.tgz", + "integrity": "sha512-Sa45PGMkBZzF94HMrlX4kUyPOwUpdZasaliKN3mifvDmkhLYqLLg8HQTzn6gq7vJGahFYMQjXgyJWfYImKZzOw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/core": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "create-jest": "^29.7.0", - "exit": "^0.1.2", - "import-local": "^3.0.2", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "yargs": "^17.3.1" + "@jest/core": "30.0.5", + "@jest/test-result": "30.0.5", + "@jest/types": "30.0.5", + "chalk": "^4.1.2", + "exit-x": "^0.2.2", + "import-local": "^3.2.0", + "jest-config": "30.0.5", + "jest-util": "30.0.5", + "jest-validate": "30.0.5", + "yargs": "^17.7.2" }, "bin": { "jest": "bin/jest.js" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" @@ -7159,46 +7606,52 @@ } }, "node_modules/jest-config": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", - "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@jest/test-sequencer": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-jest": "^29.7.0", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "deepmerge": "^4.2.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "jest-circus": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "micromatch": "^4.0.4", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-30.0.5.tgz", + "integrity": "sha512-aIVh+JNOOpzUgzUnPn5FLtyVnqc3TQHVMupYtyeURSb//iLColiMIR8TxCIDKyx9ZgjKnXGucuW68hCxgbrwmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/get-type": "30.0.1", + "@jest/pattern": "30.0.1", + "@jest/test-sequencer": "30.0.5", + "@jest/types": "30.0.5", + "babel-jest": "30.0.5", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "deepmerge": "^4.3.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-circus": "30.0.5", + "jest-docblock": "30.0.1", + "jest-environment-node": "30.0.5", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.0.5", + "jest-runner": "30.0.5", + "jest-util": "30.0.5", + "jest-validate": "30.0.5", + "micromatch": "^4.0.8", "parse-json": "^5.2.0", - "pretty-format": "^29.7.0", + "pretty-format": 
"30.0.5", "slash": "^3.0.0", "strip-json-comments": "^3.1.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { "@types/node": "*", + "esbuild-register": ">=3.4.0", "ts-node": ">=9.0.0" }, "peerDependenciesMeta": { "@types/node": { "optional": true }, + "esbuild-register": { + "optional": true + }, "ts-node": { "optional": true } @@ -7220,6 +7673,16 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/jest-config/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, "node_modules/jest-config/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -7238,9 +7701,9 @@ } }, "node_modules/jest-config/node_modules/ci-info": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", - "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.0.tgz", + "integrity": "sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==", "dev": true, "funding": [ { @@ -7273,6 +7736,27 @@ "dev": true, "license": "MIT" }, + "node_modules/jest-config/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/jest-config/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -7283,6 +7767,22 @@ "node": ">=8" } }, + "node_modules/jest-config/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/jest-config/node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -7316,19 +7816,19 @@ } }, "node_modules/jest-diff": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", - "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-30.0.5.tgz", + "integrity": "sha512-1UIqE9PoEKaHcIKvq2vbibrCog4Y8G0zmOxgQUVEiTqwR5hJVMCoDsN1vFvI5JvwD37hjueZ1C4l2FyGnfpE0A==", "dev": true, "license": "MIT", "dependencies": { - 
"chalk": "^4.0.0", - "diff-sequences": "^29.6.3", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" + "@jest/diff-sequences": "30.0.1", + "@jest/get-type": "30.0.1", + "chalk": "^4.1.2", + "pretty-format": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-diff/node_modules/ansi-styles": { @@ -7408,33 +7908,33 @@ } }, "node_modules/jest-docblock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", - "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-30.0.1.tgz", + "integrity": "sha512-/vF78qn3DYphAaIc3jy4gA7XSAz167n9Bm/wn/1XhTLW7tTBIzXtCJpb/vcmc73NIIeeohCbdL94JasyXUZsGA==", "dev": true, "license": "MIT", "dependencies": { - "detect-newline": "^3.0.0" + "detect-newline": "^3.1.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-each": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", - "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-30.0.5.tgz", + "integrity": "sha512-dKjRsx1uZ96TVyejD3/aAWcNKy6ajMaN531CwWIsrazIqIoXI9TnnpPlkrEYku/8rkS3dh2rbH+kMOyiEIv0xQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", - "jest-util": "^29.7.0", - "pretty-format": "^29.7.0" + "@jest/get-type": "30.0.1", + "@jest/types": "30.0.5", + "chalk": "^4.1.2", + "jest-util": "30.0.5", + "pretty-format": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-each/node_modules/ansi-styles": { @@ -7514,87 +8014,77 @@ } }, "node_modules/jest-environment-node": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", - "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-30.0.5.tgz", + "integrity": "sha512-ppYizXdLMSvciGsRsMEnv/5EFpvOdXBaXRBzFUDPWrsfmog4kYrOGWXarLllz6AXan6ZAA/kYokgDWuos1IKDA==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/environment": "30.0.5", + "@jest/fake-timers": "30.0.5", + "@jest/types": "30.0.5", "@types/node": "*", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" + "jest-mock": "30.0.5", + "jest-util": "30.0.5", + "jest-validate": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-get-type": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", - "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-haste-map": { - "version": "29.7.0", - 
"resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", - "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.0.5.tgz", + "integrity": "sha512-dkmlWNlsTSR0nH3nRfW5BKbqHefLZv0/6LCccG0xFCTWcJu8TuEwG+5Cm75iBfjVoockmO6J35o5gxtFSn5xeg==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "@types/graceful-fs": "^4.1.3", + "@jest/types": "30.0.5", "@types/node": "*", - "anymatch": "^3.0.3", - "fb-watchman": "^2.0.0", - "graceful-fs": "^4.2.9", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", - "micromatch": "^4.0.4", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.0.5", + "jest-worker": "30.0.5", + "micromatch": "^4.0.8", "walker": "^1.0.8" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" }, "optionalDependencies": { - "fsevents": "^2.3.2" + "fsevents": "^2.3.3" } }, "node_modules/jest-leak-detector": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", - "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-30.0.5.tgz", + "integrity": "sha512-3Uxr5uP8jmHMcsOtYMRB/zf1gXN3yUIc+iPorhNETG54gErFIiUhLvyY/OggYpSMOEYqsmRxmuU4ZOoX5jpRFg==", "dev": true, "license": "MIT", "dependencies": { - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" + "@jest/get-type": "30.0.1", + "pretty-format": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-matcher-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", - "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-30.0.5.tgz", + "integrity": "sha512-uQgGWt7GOrRLP1P7IwNWwK1WAQbq+m//ZY0yXygyfWp0rJlksMSLQAA4wYQC3b6wl3zfnchyTx+k3HZ5aPtCbQ==", "dev": true, "license": "MIT", "dependencies": { - "chalk": "^4.0.0", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" + "@jest/get-type": "30.0.1", + "chalk": "^4.1.2", + "jest-diff": "30.0.5", + "pretty-format": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-matcher-utils/node_modules/ansi-styles": { @@ -7674,24 +8164,24 @@ } }, "node_modules/jest-message-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", - "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-30.0.5.tgz", + "integrity": "sha512-NAiDOhsK3V7RU0Aa/HnrQo+E4JlbarbmI3q6Pi4KcxicdtjV82gcIUrejOtczChtVQR4kddu1E1EJlW6EN9IyA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.12.13", - "@jest/types": "^29.6.3", - 
"@types/stack-utils": "^2.0.0", - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", + "@babel/code-frame": "^7.27.1", + "@jest/types": "30.0.5", + "@types/stack-utils": "^2.0.3", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "micromatch": "^4.0.8", + "pretty-format": "30.0.5", "slash": "^3.0.0", - "stack-utils": "^2.0.3" + "stack-utils": "^2.0.6" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-message-util/node_modules/ansi-styles": { @@ -7771,18 +8261,18 @@ } }, "node_modules/jest-mock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", - "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-30.0.5.tgz", + "integrity": "sha512-Od7TyasAAQX/6S+QCbN6vZoWOMwlTtzzGuxJku1GhGanAjz9y+QsQkpScDmETvdc9aSXyJ/Op4rhpMYBWW91wQ==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", + "@jest/types": "30.0.5", "@types/node": "*", - "jest-util": "^29.7.0" + "jest-util": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-pnp-resolver": { @@ -7804,48 +8294,47 @@ } }, "node_modules/jest-regex-util": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", - "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", "dev": true, "license": "MIT", "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-resolve": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", - "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-30.0.5.tgz", + "integrity": "sha512-d+DjBQ1tIhdz91B79mywH5yYu76bZuE96sSbxj8MkjWVx5WNdt1deEFRONVL4UkKLSrAbMkdhb24XN691yDRHg==", "dev": true, "license": "MIT", "dependencies": { - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-pnp-resolver": "^1.2.2", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "resolve": "^1.20.0", - "resolve.exports": "^2.0.0", - "slash": "^3.0.0" + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.0.5", + "jest-pnp-resolver": "^1.2.3", + "jest-util": "30.0.5", + "jest-validate": "30.0.5", + "slash": "^3.0.0", + "unrs-resolver": "^1.7.11" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-resolve-dependencies": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", - "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "version": "30.0.5", + "resolved": 
"https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-30.0.5.tgz", + "integrity": "sha512-/xMvBR4MpwkrHW4ikZIWRttBBRZgWK4d6xt3xW1iRDSKt4tXzYkMkyPfBnSCgv96cpkrctfXs6gexeqMYqdEpw==", "dev": true, "license": "MIT", "dependencies": { - "jest-regex-util": "^29.6.3", - "jest-snapshot": "^29.7.0" + "jest-regex-util": "30.0.1", + "jest-snapshot": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-resolve/node_modules/ansi-styles": { @@ -7925,36 +8414,37 @@ } }, "node_modules/jest-runner": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", - "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-30.0.5.tgz", + "integrity": "sha512-JcCOucZmgp+YuGgLAXHNy7ualBx4wYSgJVWrYMRBnb79j9PD0Jxh0EHvR5Cx/r0Ce+ZBC4hCdz2AzFFLl9hCiw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/console": "^29.7.0", - "@jest/environment": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/console": "30.0.5", + "@jest/environment": "30.0.5", + "@jest/test-result": "30.0.5", + "@jest/transform": "30.0.5", + "@jest/types": "30.0.5", "@types/node": "*", - "chalk": "^4.0.0", + "chalk": "^4.1.2", "emittery": "^0.13.1", - "graceful-fs": "^4.2.9", - "jest-docblock": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-leak-detector": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-resolve": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-util": "^29.7.0", - "jest-watcher": "^29.7.0", - "jest-worker": "^29.7.0", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-docblock": "30.0.1", + "jest-environment-node": "30.0.5", + "jest-haste-map": "30.0.5", + "jest-leak-detector": "30.0.5", + "jest-message-util": "30.0.5", + "jest-resolve": "30.0.5", + "jest-runtime": "30.0.5", + "jest-util": "30.0.5", + "jest-watcher": "30.0.5", + "jest-worker": "30.0.5", "p-limit": "^3.1.0", "source-map-support": "0.5.13" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-runner/node_modules/ansi-styles": { @@ -8063,37 +8553,37 @@ } }, "node_modules/jest-runtime": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", - "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-30.0.5.tgz", + "integrity": "sha512-7oySNDkqpe4xpX5PPiJTe5vEa+Ak/NnNz2bGYZrA1ftG3RL3EFlHaUkA1Cjx+R8IhK0Vg43RML5mJedGTPNz3A==", "dev": true, "license": "MIT", "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/globals": "^29.7.0", - "@jest/source-map": "^29.6.3", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/environment": "30.0.5", + "@jest/fake-timers": "30.0.5", + "@jest/globals": "30.0.5", + "@jest/source-map": "30.0.1", + "@jest/test-result": "30.0.5", + "@jest/transform": "30.0.5", + "@jest/types": "30.0.5", "@types/node": "*", - "chalk": "^4.0.0", - "cjs-module-lexer": "^1.0.0", - "collect-v8-coverage": "^1.0.0", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", 
- "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", + "chalk": "^4.1.2", + "cjs-module-lexer": "^2.1.0", + "collect-v8-coverage": "^1.0.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.0.5", + "jest-message-util": "30.0.5", + "jest-mock": "30.0.5", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.0.5", + "jest-snapshot": "30.0.5", + "jest-util": "30.0.5", "slash": "^3.0.0", "strip-bom": "^4.0.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-runtime/node_modules/ansi-styles": { @@ -8112,6 +8602,16 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/jest-runtime/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, "node_modules/jest-runtime/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -8149,6 +8649,27 @@ "dev": true, "license": "MIT" }, + "node_modules/jest-runtime/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/jest-runtime/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -8159,6 +8680,22 @@ "node": ">=8" } }, + "node_modules/jest-runtime/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/jest-runtime/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -8173,35 +8710,49 @@ } }, "node_modules/jest-snapshot": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", - "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-30.0.5.tgz", + "integrity": "sha512-T00dWU/Ek3LqTp4+DcW6PraVxjk28WY5Ua/s+3zUKSERZSNyxTqhDXCWKG5p2HAJ+crVQ3WJ2P9YVHpj1tkW+g==", "dev": true, "license": "MIT", "dependencies": { - "@babel/core": "^7.11.6", - "@babel/generator": "^7.7.2", - "@babel/plugin-syntax-jsx": "^7.7.2", - "@babel/plugin-syntax-typescript": "^7.7.2", - 
"@babel/types": "^7.3.3", - "@jest/expect-utils": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0", - "chalk": "^4.0.0", - "expect": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "natural-compare": "^1.4.0", - "pretty-format": "^29.7.0", - "semver": "^7.5.3" + "@babel/core": "^7.27.4", + "@babel/generator": "^7.27.5", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/plugin-syntax-typescript": "^7.27.1", + "@babel/types": "^7.27.3", + "@jest/expect-utils": "30.0.5", + "@jest/get-type": "30.0.1", + "@jest/snapshot-utils": "30.0.5", + "@jest/transform": "30.0.5", + "@jest/types": "30.0.5", + "babel-preset-current-node-syntax": "^1.1.0", + "chalk": "^4.1.2", + "expect": "30.0.5", + "graceful-fs": "^4.2.11", + "jest-diff": "30.0.5", + "jest-matcher-utils": "30.0.5", + "jest-message-util": "30.0.5", + "jest-util": "30.0.5", + "pretty-format": "30.0.5", + "semver": "^7.7.2", + "synckit": "^0.11.8" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", + "integrity": "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" } }, "node_modules/jest-snapshot/node_modules/ansi-styles": { @@ -8280,22 +8831,38 @@ "node": ">=8" } }, + "node_modules/jest-snapshot/node_modules/synckit": { + "version": "0.11.11", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.11.tgz", + "integrity": "sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.9" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/synckit" + } + }, "node_modules/jest-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", - "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.0.5.tgz", + "integrity": "sha512-pvyPWssDZR0FlfMxCBoc0tvM8iUEskaRFALUtGQYzVEAqisAztmy+R8LnU14KT4XA0H/a5HMVTXat1jLne010g==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", + "@jest/types": "30.0.5", "@types/node": "*", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "graceful-fs": "^4.2.9", - "picomatch": "^2.2.3" + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-util/node_modules/ansi-styles": { @@ -8332,9 +8899,9 @@ } }, "node_modules/jest-util/node_modules/ci-info": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", - "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ci-info/-/ci-info-4.3.0.tgz", + "integrity": "sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==", "dev": true, "funding": [ { @@ -8377,6 +8944,19 @@ "node": ">=8" } }, + "node_modules/jest-util/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/jest-util/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -8391,21 +8971,21 @@ } }, "node_modules/jest-validate": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", - "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-30.0.5.tgz", + "integrity": "sha512-ouTm6VFHaS2boyl+k4u+Qip4TSH7Uld5tyD8psQ8abGgt2uYYB8VwVfAHWHjHc0NWmGGbwO5h0sCPOGHHevefw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/types": "^29.6.3", - "camelcase": "^6.2.0", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", + "@jest/get-type": "30.0.1", + "@jest/types": "30.0.5", + "camelcase": "^6.3.0", + "chalk": "^4.1.2", "leven": "^3.1.0", - "pretty-format": "^29.7.0" + "pretty-format": "30.0.5" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-validate/node_modules/ansi-styles": { @@ -8498,23 +9078,23 @@ } }, "node_modules/jest-watcher": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", - "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-30.0.5.tgz", + "integrity": "sha512-z9slj/0vOwBDBjN3L4z4ZYaA+pG56d6p3kTUhFRYGvXbXMWhXmb/FIxREZCD06DYUwDKKnj2T80+Pb71CQ0KEg==", "dev": true, "license": "MIT", "dependencies": { - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", + "@jest/test-result": "30.0.5", + "@jest/types": "30.0.5", "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", "emittery": "^0.13.1", - "jest-util": "^29.7.0", - "string-length": "^4.0.1" + "jest-util": "30.0.5", + "string-length": "^4.0.2" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-watcher/node_modules/ansi-styles": { @@ -8594,19 +9174,20 @@ } }, "node_modules/jest-worker": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", - "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.0.5.tgz", + "integrity": "sha512-ojRXsWzEP16NdUuBw/4H/zkZdHOa7MMYCk4E430l+8fELeLg/mqmMlRhjL7UNZvQrDmnovWZV4DxX03fZF48fQ==", "dev": true, "license": "MIT", "dependencies": { "@types/node": "*", - "jest-util": "^29.7.0", + "@ungap/structured-clone": 
"^1.3.0", + "jest-util": "30.0.5", "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" + "supports-color": "^8.1.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/jest-worker/node_modules/has-flag": { @@ -8775,16 +9356,6 @@ "json-buffer": "3.0.1" } }, - "node_modules/kleur": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", - "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -9260,12 +9831,35 @@ "dev": true, "license": "MIT" }, + "node_modules/napi-postinstall": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.3.tgz", + "integrity": "sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==", + "dev": true, + "license": "MIT", + "bin": { + "napi-postinstall": "lib/cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/napi-postinstall" + } + }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", "dev": true }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "license": "MIT" + }, "node_modules/nice-try": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", @@ -9647,6 +10241,13 @@ "node": ">=6" } }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -9712,6 +10313,30 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -9732,9 +10357,9 @@ } }, 
"node_modules/pirates": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", - "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", "dev": true, "license": "MIT", "engines": { @@ -9874,18 +10499,18 @@ } }, "node_modules/pretty-format": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", - "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-30.0.5.tgz", + "integrity": "sha512-D1tKtYvByrBkFLe2wHJl2bwMJIiT8rW+XA+TiataH79/FszLQMrpGEvzUVkzPau7OCO0Qnrhpe87PqtOAIB8Yw==", "dev": true, "license": "MIT", "dependencies": { - "@jest/schemas": "^29.6.3", - "ansi-styles": "^5.0.0", - "react-is": "^18.0.0" + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/pretty-format/node_modules/ansi-styles": { @@ -9918,20 +10543,6 @@ "node": ">=0.4.0" } }, - "node_modules/prompts": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", - "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "kleur": "^3.0.3", - "sisteransi": "^1.0.5" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/propagate": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", @@ -9966,9 +10577,9 @@ } }, "node_modules/pure-rand": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", - "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-7.0.1.tgz", + "integrity": "sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==", "dev": true, "funding": [ { @@ -10171,16 +10782,6 @@ "node": ">=4" } }, - "node_modules/resolve.exports": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", - "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -10271,9 +10872,9 @@ } }, "node_modules/semver": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", - "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, "license": "ISC", "bin": { @@ -10368,13 +10969,6 @@ "simple-concat": "^1.0.0" } }, - "node_modules/sisteransi": { - "version": "1.0.5", - 
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "dev": true, - "license": "MIT" - }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -10565,6 +11159,52 @@ "node": ">=8" } }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/string-width/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -10603,6 +11243,20 @@ "node": ">=6" } }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-ansi/node_modules/ansi-regex": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", @@ -10924,20 +11578,20 @@ } }, "node_modules/ts-jest": { - "version": "29.2.5", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.5.tgz", - "integrity": "sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==", + "version": "29.4.1", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.1.tgz", + "integrity": "sha512-SaeUtjfpg9Uqu8IbeDKtdaS0g8lS6FT6OzM3ezrDfErPJPHNDo/Ey+VFGP1bQIDfagYDLyRpd7O15XpG1Es2Uw==", "dev": true, "license": "MIT", "dependencies": { "bs-logger": "^0.2.6", - "ejs": "^3.1.10", "fast-json-stable-stringify": "^2.1.0", - "jest-util": "^29.0.0", + "handlebars": "^4.7.8", "json5": "^2.2.3", "lodash.memoize": "^4.1.2", "make-error": "^1.3.6", - "semver": "^7.6.3", + "semver": "^7.7.2", + "type-fest": "^4.41.0", "yargs-parser": "^21.1.1" }, "bin": { @@ 
-10948,10 +11602,11 @@ }, "peerDependencies": { "@babel/core": ">=7.0.0-beta.0 <8", - "@jest/transform": "^29.0.0", - "@jest/types": "^29.0.0", - "babel-jest": "^29.0.0", - "jest": "^29.0.0", + "@jest/transform": "^29.0.0 || ^30.0.0", + "@jest/types": "^29.0.0 || ^30.0.0", + "babel-jest": "^29.0.0 || ^30.0.0", + "jest": "^29.0.0 || ^30.0.0", + "jest-util": "^29.0.0 || ^30.0.0", "typescript": ">=4.3 <6" }, "peerDependenciesMeta": { @@ -10969,9 +11624,25 @@ }, "esbuild": { "optional": true + }, + "jest-util": { + "optional": true } } }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/ts-jest/node_modules/yargs-parser": { "version": "21.1.1", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", @@ -11100,6 +11771,20 @@ "node": ">=14.17" } }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/undici-types": { "version": "6.19.8", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", @@ -11129,6 +11814,41 @@ "node": ">= 10.0.0" } }, + "node_modules/unrs-resolver": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/unrs-resolver/-/unrs-resolver-1.11.1.tgz", + "integrity": "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "napi-postinstall": "^0.3.0" + }, + "funding": { + "url": "https://opencollective.com/unrs-resolver" + }, + "optionalDependencies": { + "@unrs/resolver-binding-android-arm-eabi": "1.11.1", + "@unrs/resolver-binding-android-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-x64": "1.11.1", + "@unrs/resolver-binding-freebsd-x64": "1.11.1", + "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", + "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", + "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-musl": "1.11.1", + "@unrs/resolver-binding-wasm32-wasi": "1.11.1", + "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", + "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", + "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" + } + }, "node_modules/unzipper": { "version": "0.12.3", "resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.12.3.tgz", @@ -11159,9 +11879,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.2.tgz", - "integrity": "sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", "dev": true, "funding": [ { @@ -11235,9 +11955,9 @@ } }, "node_modules/v8-to-istanbul/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "version": "0.3.30", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", + "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", "dev": true, "license": "MIT", "dependencies": { @@ -11302,6 +12022,13 @@ "node": ">=0.10.0" } }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true, + "license": "MIT" + }, "node_modules/wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", @@ -11315,6 +12042,74 @@ "node": ">=6" } }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/wrap-ansi/node_modules/string-width": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", @@ -11334,17 +12129,30 @@ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "node_modules/write-file-atomic": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", - "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", "dev": true, "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.7" + "signal-exit": "^4.0.1" }, "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/y18n": { diff --git a/package.json b/package.json index 3a8fe646..d1e152e7 100644 --- a/package.json +++ b/package.json @@ -81,7 +81,8 @@ "@eslint/js": "^9.20.0", "@types/cli-progress": "^3.11.6", "@types/fs-extra": "^9.0.13", - "@types/jest": "^29.5.14", + "@types/graceful-fs": "^4.1.9", + "@types/jest": "^30.0.0", "@types/lodash": "^4.14.144", "@types/node": "^20.17.19", "@types/node-fetch": "^2.5.7", @@ -100,10 +101,10 @@ "eslint-plugin-prettier": "^5.2.3", "globals": "^15.15.0", "husky": "^3.0.5", - "jest": "^29.7.0", + "jest": "^30.0.5", "nock": "^12.0.3", "prettier": "^3.5.1", - "ts-jest": "^29.2.5", + "ts-jest": "^29.4.1", "ts-node": "^10.9.1", "typescript": "^5.7.3" }, diff --git a/src/commands/configure.spec.ts b/src/commands/configure.spec.ts index 88177c2a..65239135 100644 --- a/src/commands/configure.spec.ts +++ b/src/commands/configure.spec.ts @@ -97,7 +97,7 @@ describe('configure command', function () { expect(() => { handler({ ...yargArgs, ...configFixture, config: CONFIG_FILENAME() }); - }).toThrowError(/^Unable to create dir ".*". Reason: .*/); + }).toThrow(/^Unable to create dir ".*". Reason: .*/); expect(fs.existsSync).toHaveBeenCalledWith(expect.stringMatching(/\.amplience$/)); expect(fs.mkdirSync).toHaveBeenCalledWith(expect.stringMatching(/\.amplience$/), { recursive: true }); @@ -113,7 +113,7 @@ describe('configure command', function () { expect(() => { handler({ ...yargArgs, ...configFixture, config: CONFIG_FILENAME() }); - }).toThrowError(/^Unable to write config file ".*". Reason: .*/); + }).toThrow(/^Unable to write config file ".*". 
Reason: .*/); expect(fs.existsSync).toHaveBeenCalledWith(expect.stringMatching(/\.amplience$/)); expect(fs.mkdirSync).not.toHaveBeenCalledWith(); diff --git a/src/commands/content-item/archive.spec.ts b/src/commands/content-item/archive.spec.ts index 73c4e4f7..8812b6cd 100644 --- a/src/commands/content-item/archive.spec.ts +++ b/src/commands/content-item/archive.spec.ts @@ -342,7 +342,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should archive content by id', async () => { @@ -361,7 +361,7 @@ describe('content-item archive command', () => { expect(mockItemGetById).toHaveBeenCalled(); expect(mockFacet).not.toHaveBeenCalled(); - expect(mockArchive).toBeCalledTimes(1); + expect(mockArchive).toHaveBeenCalledTimes(1); }); it("shouldn't archive content by id", async () => { @@ -379,7 +379,7 @@ describe('content-item archive command', () => { expect(mockItemGetById).toHaveBeenCalled(); expect(mockFacet).not.toHaveBeenCalled(); - expect(mockArchive).not.toBeCalled(); + expect(mockArchive).not.toHaveBeenCalled(); }); it('should archive content by repo id', async () => { @@ -401,7 +401,7 @@ describe('content-item archive command', () => { repoId: 'repo1', enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should archive content by repo ids', async () => { @@ -423,7 +423,7 @@ describe('content-item archive command', () => { repoId: ['repo1', 'repo2'], enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should archive content by folder id', async () => { @@ -447,7 +447,7 @@ describe('content-item archive command', () => { repoId: 'repo123', enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should archive content by folder ids', async () => { @@ -469,7 +469,7 @@ describe('content-item archive command', () => { folderId: ['folder1', 'folder1'], enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should archive content by name', async () => { @@ -492,7 +492,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should exit if a facet AND id are provided', async () => { @@ -509,10 +509,10 @@ describe('content-item archive command', () => { }; await handler(argv); - expect(mockFacet).not.toBeCalled(); - expect(mockFolderGet).not.toBeCalled(); - expect(mockItemsList).not.toBeCalled(); - expect(mockArchive).not.toBeCalled(); + expect(mockFacet).not.toHaveBeenCalled(); + expect(mockFolderGet).not.toHaveBeenCalled(); + expect(mockItemsList).not.toHaveBeenCalled(); + expect(mockArchive).not.toHaveBeenCalled(); }); it("shouldn't unarchive content when facet returns none", async () => { @@ -538,7 +538,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).not.toBeCalled(); + expect(mockArchive).not.toHaveBeenCalled(); }); it("shouldn't archive content, answer no", async () => { @@ -561,7 +561,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).not.toBeCalled(); + expect(mockArchive).not.toHaveBeenCalled(); }); it('should 
archive content by content type name', async () => { @@ -582,7 +582,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should archive content with ignoreError', async () => { @@ -603,7 +603,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it("shouldn't archive content with ignoreError", async () => { @@ -624,7 +624,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).toBeCalledTimes(1); + expect(mockArchive).toHaveBeenCalledTimes(1); }); it('should archive content items without asking if --force is provided', async () => { @@ -645,7 +645,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it('should archive content items specified in the provided --revertLog', async () => { @@ -680,7 +680,7 @@ describe('content-item archive command', () => { expect(mockItemGetById).toHaveBeenNthCalledWith(1, '1'); expect(mockItemGetById).toHaveBeenNthCalledWith(2, '2'); expect(mockItemGetById).toHaveBeenNthCalledWith(3, 'idMissing'); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); }); it("shouldn't archive content items, getFacet error", async () => { @@ -704,7 +704,7 @@ describe('content-item archive command', () => { status: Status.ACTIVE, enrichItems: true }); - expect(mockArchive).not.toBeCalled(); + expect(mockArchive).not.toHaveBeenCalled(); }); it("shouldn't archive content items, revertLog error", async () => { @@ -737,7 +737,7 @@ describe('content-item archive command', () => { expect(mockItemGetById).not.toHaveBeenCalled(); expect(mockFacet).not.toHaveBeenCalled(); - expect(mockArchive).not.toBeCalled(); + expect(mockArchive).not.toHaveBeenCalled(); }); it('should archive content items, write log file', async () => { @@ -762,7 +762,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockItemGetById).toHaveBeenCalled(); - expect(mockArchive).toBeCalled(); + expect(mockArchive).toHaveBeenCalled(); const logExists = await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`); @@ -903,7 +903,7 @@ describe('content-item archive command', () => { logFile: createLog('./logFile.log') }); - expect(mockArchive).toBeCalledTimes(2); + expect(mockArchive).toHaveBeenCalledTimes(2); if (await promisify(exists)('./logFile.log')) { await promisify(unlink)('./logFile.log'); @@ -920,7 +920,7 @@ describe('content-item archive command', () => { logFile: new FileLog() }); - expect(console.log).toBeCalled(); + expect(console.log).toHaveBeenCalled(); expect(console.log).toHaveBeenLastCalledWith('Nothing found to archive, aborting.'); }); }); diff --git a/src/commands/content-item/unarchive.spec.ts b/src/commands/content-item/unarchive.spec.ts index c48f36c9..518e34e8 100644 --- a/src/commands/content-item/unarchive.spec.ts +++ b/src/commands/content-item/unarchive.spec.ts @@ -324,7 +324,7 @@ describe('content-item unarchive command', () => { expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + 
expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should unarchive content by id', async () => { @@ -342,7 +342,7 @@ describe('content-item unarchive command', () => { await handler(argv); expect(mockItemGetById).toHaveBeenCalled(); - expect(mockUnarchive).toBeCalledTimes(1); + expect(mockUnarchive).toHaveBeenCalledTimes(1); }); it("shouldn't unarchive content by id", async () => { @@ -359,7 +359,7 @@ describe('content-item unarchive command', () => { await handler(argv); expect(mockItemGetById).toHaveBeenCalled(); - expect(mockUnarchive).not.toBeCalled(); + expect(mockUnarchive).not.toHaveBeenCalled(); }); it('should unarchive content by repo id', async () => { @@ -379,7 +379,7 @@ describe('content-item unarchive command', () => { repoId: 'repo1', status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should unarchive content by repo ids', async () => { @@ -399,7 +399,7 @@ describe('content-item unarchive command', () => { repoId: ['repo1', 'repo2'], status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should unarchive content by folder id', async () => { @@ -421,7 +421,7 @@ describe('content-item unarchive command', () => { folderId: 'folder1', status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should unarchive content by folder ids', async () => { @@ -441,7 +441,7 @@ describe('content-item unarchive command', () => { folderId: ['folder1', 'folder1'], status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should unarchive content by name', async () => { @@ -462,7 +462,7 @@ describe('content-item unarchive command', () => { folderId: 'folder1', status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should exit if a facet AND id are provided', async () => { @@ -479,10 +479,10 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).not.toBeCalled(); - expect(mockFolderGet).not.toBeCalled(); - expect(mockItemsList).not.toBeCalled(); - expect(mockUnarchive).not.toBeCalled(); + expect(mockFacet).not.toHaveBeenCalled(); + expect(mockFolderGet).not.toHaveBeenCalled(); + expect(mockItemsList).not.toHaveBeenCalled(); + expect(mockUnarchive).not.toHaveBeenCalled(); }); it("shouldn't unarchive content when facet returns none", async () => { @@ -506,7 +506,7 @@ describe('content-item unarchive command', () => { folderId: 'folder1', status: Status.ARCHIVED }); - expect(mockUnarchive).not.toBeCalled(); + expect(mockUnarchive).not.toHaveBeenCalled(); }); it("shouldn't unarchive content, answer no", async () => { @@ -527,7 +527,7 @@ describe('content-item unarchive command', () => { folderId: 'folder1', status: Status.ARCHIVED }); - expect(mockUnarchive).not.toBeCalled(); + expect(mockUnarchive).not.toHaveBeenCalled(); }); it('should unarchive content by content type name', async () => { @@ -546,7 +546,7 @@ describe('content-item unarchive command', () => { expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'schema:http://test.com', { status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should unarchive content with ignoreError', async () => { @@ -565,7 +565,7 @@ 
describe('content-item unarchive command', () => { expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it("shouldn't unarchive content with ignoreError", async () => { @@ -584,7 +584,7 @@ describe('content-item unarchive command', () => { expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(1); + expect(mockUnarchive).toHaveBeenCalledTimes(1); }); it('should unarchive content items without asking if --force is provided', async () => { @@ -603,7 +603,7 @@ describe('content-item unarchive command', () => { expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it('should unarchive content items specified in the provided --revertLog', async () => { @@ -642,7 +642,7 @@ describe('content-item unarchive command', () => { expect(mockItemUpdate).toHaveBeenCalled(); const updateItem: ContentItem = (mockItemUpdate as jest.Mock).mock.calls[0][0]; expect(updateItem.body._meta.deliveryKey).toEqual('delivery-key'); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); }); it("shouldn't unarchive content items, getFacet error", async () => { @@ -665,7 +665,7 @@ describe('content-item unarchive command', () => { folderId: 'folder1', status: Status.ARCHIVED }); - expect(mockUnarchive).not.toBeCalled(); + expect(mockUnarchive).not.toHaveBeenCalled(); }); it("shouldn't unarchive content items, revertLog error", async () => { @@ -699,7 +699,7 @@ describe('content-item unarchive command', () => { expect(mockItemGetById).not.toHaveBeenCalled(); expect(mockFacet).not.toHaveBeenCalled(); - expect(mockUnarchive).not.toBeCalled(); + expect(mockUnarchive).not.toHaveBeenCalled(); }); it('should unarchive content items, write log file', async () => { @@ -722,7 +722,7 @@ describe('content-item unarchive command', () => { await handler(argv); expect(mockItemGetById).toHaveBeenCalled(); - expect(mockUnarchive).toBeCalled(); + expect(mockUnarchive).toHaveBeenCalled(); const logExists = await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`); @@ -885,7 +885,7 @@ describe('content-item unarchive command', () => { logFile: './logFile.log' }); - expect(mockUnarchive).toBeCalledTimes(2); + expect(mockUnarchive).toHaveBeenCalledTimes(2); if (await promisify(exists)('./logFile.log')) { await promisify(unlink)('./logFile.log'); @@ -901,7 +901,7 @@ describe('content-item unarchive command', () => { missingContent: false }); - expect(console.log).toBeCalled(); + expect(console.log).toHaveBeenCalled(); expect(console.log).toHaveBeenLastCalledWith('Nothing found to unarchive, aborting.'); }); }); diff --git a/src/commands/content-repository/list.spec.ts b/src/commands/content-repository/list.spec.ts index 14ab67ac..da49a194 100644 --- a/src/commands/content-repository/list.spec.ts +++ b/src/commands/content-repository/list.spec.ts @@ -99,8 +99,8 @@ describe('content-type-schema list command', (): void => { const argv = { ...yargArgs, ...config, ...pagingOptions }; await handler(argv); - expect(mockGetHub).toBeCalledWith('hub-id'); - expect(mockList).toBeCalledWith({ size: DEFAULT_SIZE, ...pagingOptions }); + 
expect(mockGetHub).toHaveBeenCalledWith('hub-id'); + expect(mockList).toHaveBeenCalledWith({ size: DEFAULT_SIZE, ...pagingOptions }); expect(mockDataPresenter).toHaveBeenCalledWith(plainListContentRepository); expect(mockDataPresenter.mock.instances[0].render).toHaveBeenCalledWith({ itemMapFn, json: argv.json }); diff --git a/src/commands/content-type-schema/list.spec.ts b/src/commands/content-type-schema/list.spec.ts index 936a8dad..d43e8e3f 100644 --- a/src/commands/content-type-schema/list.spec.ts +++ b/src/commands/content-type-schema/list.spec.ts @@ -64,8 +64,8 @@ describe('content-type-schema list command', (): void => { const argv = { ...yargArgs, ...config }; await handler(argv); - expect(mockGetHub).toBeCalledWith('hub-id'); - expect(mockList).toBeCalledWith({ size: DEFAULT_SIZE }); + expect(mockGetHub).toHaveBeenCalledWith('hub-id'); + expect(mockList).toHaveBeenCalledWith({ size: DEFAULT_SIZE }); expect(mockDataPresenter).toHaveBeenCalledWith(plainListContentTypeSchemas); expect(mockDataPresenter.mock.instances[0].render).toHaveBeenCalledWith({ itemMapFn, json: argv.json }); @@ -95,7 +95,7 @@ describe('content-type-schema list command', (): void => { const argv = { ...yargArgs, ...config }; await handler(argv); - expect(mockGetHub).toBeCalledWith('hub-id'); + expect(mockGetHub).toHaveBeenCalledWith('hub-id'); expect(mockList).toHaveBeenCalled(); expect(mockDataPresenter).toHaveBeenCalledTimes(0); }); diff --git a/src/commands/content-type/import.spec.ts b/src/commands/content-type/import.spec.ts index 5b0b19bd..3f0d64f1 100644 --- a/src/commands/content-type/import.spec.ts +++ b/src/commands/content-type/import.spec.ts @@ -507,7 +507,7 @@ describe('content-type import command', (): void => { }), new Map([]) ) - ).rejects.toThrowError( + ).rejects.toThrow( new Error('Invalid format supplied for repositories. Please provide an array of repository names') ); }); @@ -522,7 +522,7 @@ describe('content-type import command', (): void => { }), new Map([]) ) - ).rejects.toThrowError( + ).rejects.toThrow( new Error('Invalid format supplied for repositories. 
Please provide an array of repository names') ); }); diff --git a/src/commands/content-type/list.spec.ts b/src/commands/content-type/list.spec.ts index 33db409d..046feb1d 100644 --- a/src/commands/content-type/list.spec.ts +++ b/src/commands/content-type/list.spec.ts @@ -67,8 +67,8 @@ describe('content-type list command', (): void => { const argv = { ...yargArgs, ...config, ...pagingOptions }; await handler(argv); - expect(mockGetHub).toBeCalledWith('hub-id'); - expect(mockList).toBeCalledWith({ size: DEFAULT_SIZE, ...pagingOptions }); + expect(mockGetHub).toHaveBeenCalledWith('hub-id'); + expect(mockList).toHaveBeenCalledWith({ size: DEFAULT_SIZE, ...pagingOptions }); expect(mockDataPresenter).toHaveBeenCalledWith(plainListContentTypes); expect(mockDataPresenter.mock.instances[0].render).toHaveBeenCalledWith({ json: argv.json, itemMapFn }); diff --git a/src/commands/content-type/register.spec.ts b/src/commands/content-type/register.spec.ts index f56939ae..e09acc3f 100644 --- a/src/commands/content-type/register.spec.ts +++ b/src/commands/content-type/register.spec.ts @@ -164,7 +164,7 @@ describe('content-type register command', () => { await handler(argv); - expect(mockGetHub).toBeCalledWith('hub-id'); + expect(mockGetHub).toHaveBeenCalledWith('hub-id'); expect(mockRegister).toHaveBeenCalledWith(expect.objectContaining(registerResponse.toJSON())); expect(mockDataPresenter).toHaveBeenCalledWith(plainContentType); expect(mockDataPresenter.mock.instances[0].render).toHaveBeenCalledWith({ diff --git a/src/commands/event/archive.spec.ts b/src/commands/event/archive.spec.ts index d7bf77ee..049bb8ce 100644 --- a/src/commands/event/archive.spec.ts +++ b/src/commands/event/archive.spec.ts @@ -338,7 +338,7 @@ describe('event archive command', () => { expect(mockGet).toHaveBeenCalled(); expect(mockEditionsList).toHaveBeenCalled(); - expect(deleteMock).toBeCalledTimes(1); + expect(deleteMock).toHaveBeenCalledTimes(1); }); it('should archive event with published', async () => { @@ -356,7 +356,7 @@ describe('event archive command', () => { expect(mockGet).toHaveBeenCalled(); expect(mockEditionsList).toHaveBeenCalled(); - expect(archiveMock).toBeCalledTimes(2); + expect(archiveMock).toHaveBeenCalledTimes(2); }); it('should archive events when multiple ids provided', async () => { @@ -374,7 +374,7 @@ describe('event archive command', () => { expect(mockGet).toHaveBeenCalledTimes(4); expect(mockEditionsList).toHaveBeenCalledTimes(2); - expect(archiveMock).toBeCalledTimes(4); + expect(archiveMock).toHaveBeenCalledTimes(4); }); it('should delete event with scheduled edition', async () => { @@ -392,7 +392,7 @@ describe('event archive command', () => { expect(mockGet).toHaveBeenCalled(); expect(mockEditionsList).toHaveBeenCalled(); - expect(deleteMock).toBeCalledTimes(2); + expect(deleteMock).toHaveBeenCalledTimes(2); }); it('should archive event with published+scheduled edition, after deleting the scheduled one', async () => { @@ -414,8 +414,8 @@ describe('event archive command', () => { expect(mockGet).toHaveBeenCalled(); expect(mockEditionGet).toHaveBeenCalled(); expect(mockEditionsList).toHaveBeenCalled(); - expect(deleteMock).toBeCalledTimes(2); - expect(archiveMock).toBeCalledTimes(2); + expect(deleteMock).toHaveBeenCalledTimes(2); + expect(archiveMock).toHaveBeenCalledTimes(2); }); it("shouldn't archive event, no id", async () => { @@ -453,7 +453,7 @@ describe('event archive command', () => { expect(mockEventsList).toHaveBeenCalled(); expect(mockEditionsList).toHaveBeenCalled(); 
expect(mockGet).toHaveBeenCalled(); - expect(deleteMock).toBeCalledTimes(2); + expect(deleteMock).toHaveBeenCalledTimes(2); }); it("shouldn't archive event by name", async () => { @@ -491,7 +491,7 @@ describe('event archive command', () => { expect(mockGet).toHaveBeenCalled(); expect(mockEditionsList).toHaveBeenCalled(); - expect(deleteMock).toBeCalledTimes(0); + expect(deleteMock).toHaveBeenCalledTimes(0); }); it('should log error, no resource', async () => { diff --git a/src/commands/event/import.spec.ts b/src/commands/event/import.spec.ts index 408693d4..12ea95b1 100644 --- a/src/commands/event/import.spec.ts +++ b/src/commands/event/import.spec.ts @@ -1369,7 +1369,7 @@ Array [ expect(importSlots).toHaveBeenCalledWith(slots, mapping, hub, realEdition, argv, log); expect(scheduleEdition).toHaveBeenCalledWith(expect.any(Edition), log); - expect(importModule.prepareEditionForSchedule).toBeCalledTimes(2); // This should be called again just before schedule. + expect(importModule.prepareEditionForSchedule).toHaveBeenCalledTimes(2); // This should be called again just before schedule. }); }); diff --git a/src/commands/hub/steps/event-clone-step.spec.ts b/src/commands/hub/steps/event-clone-step.spec.ts index b9e65bb8..f79ab041 100644 --- a/src/commands/hub/steps/event-clone-step.spec.ts +++ b/src/commands/hub/steps/event-clone-step.spec.ts @@ -155,7 +155,7 @@ describe('event clone step', () => { ...state.from }); - expect(eventImport.handler).toBeCalledWith({ + expect(eventImport.handler).toHaveBeenCalledWith({ dir: join(state.path, 'event'), originalIds: false, schedule: true, @@ -178,8 +178,8 @@ describe('event clone step', () => { const backupFail = await step.run(state); expect(backupFail).toBeFalsy(); - expect(eventExport.handler).toBeCalledTimes(1); - expect(eventImport.handler).not.toBeCalled(); + expect(eventExport.handler).toHaveBeenCalledTimes(1); + expect(eventImport.handler).not.toHaveBeenCalled(); reset(); @@ -189,8 +189,8 @@ describe('event clone step', () => { const exportFail = await step.run(state); expect(exportFail).toBeFalsy(); - expect(eventExport.handler).toBeCalledTimes(2); - expect(eventImport.handler).not.toBeCalled(); + expect(eventExport.handler).toHaveBeenCalledTimes(2); + expect(eventImport.handler).not.toHaveBeenCalled(); reset(); @@ -200,8 +200,8 @@ describe('event clone step', () => { const importFail = await step.run(state); expect(importFail).toBeFalsy(); - expect(eventExport.handler).toBeCalledTimes(2); - expect(eventImport.handler).toBeCalled(); + expect(eventExport.handler).toHaveBeenCalledTimes(2); + expect(eventImport.handler).toHaveBeenCalled(); }); it('should attempt to archive events with the CREATE action on revert, skipping archived events', async () => { @@ -222,7 +222,7 @@ describe('event clone step', () => { expect(mockArchiveEvent).toHaveBeenCalledTimes(1); expect(mockFailArchiveEvent).toHaveBeenCalledTimes(1); expect(state.logFile.getData('ARCHIVE').length).toEqual(1); - expect(eventImport.handler).not.toBeCalled(); + expect(eventImport.handler).not.toHaveBeenCalled(); }); it('should pass events with the UPDATE action to the event import command on revert, in the oldEvent folder', async () => { @@ -241,7 +241,7 @@ describe('event clone step', () => { const result = await step.revert(state); expect(mockArchiveEvent).toHaveBeenCalledTimes(1); - expect(eventImport.handler).toBeCalledWith({ + expect(eventImport.handler).toHaveBeenCalledWith({ dir: join(state.path, 'oldEvent'), originalIds: true, schedule: true, diff --git 
a/src/commands/hub/steps/extension-clone-step.spec.ts b/src/commands/hub/steps/extension-clone-step.spec.ts index e4cbfd69..59fb6c47 100644 --- a/src/commands/hub/steps/extension-clone-step.spec.ts +++ b/src/commands/hub/steps/extension-clone-step.spec.ts @@ -119,7 +119,7 @@ describe('extension clone step', () => { ...state.from }); - expect(extensionImport.handler).toBeCalledWith({ + expect(extensionImport.handler).toHaveBeenCalledWith({ dir: join(state.path, 'extension'), logFile: state.logFile, ...state.to @@ -137,8 +137,8 @@ describe('extension clone step', () => { const backupFail = await step.run(state); expect(backupFail).toBeFalsy(); - expect(extensionExport.handler).toBeCalledTimes(1); - expect(extensionImport.handler).not.toBeCalled(); + expect(extensionExport.handler).toHaveBeenCalledTimes(1); + expect(extensionImport.handler).not.toHaveBeenCalled(); reset(); @@ -148,8 +148,8 @@ describe('extension clone step', () => { const exportFail = await step.run(state); expect(exportFail).toBeFalsy(); - expect(extensionExport.handler).toBeCalledTimes(2); - expect(extensionImport.handler).not.toBeCalled(); + expect(extensionExport.handler).toHaveBeenCalledTimes(2); + expect(extensionImport.handler).not.toHaveBeenCalled(); reset(); @@ -159,8 +159,8 @@ describe('extension clone step', () => { const importFail = await step.run(state); expect(importFail).toBeFalsy(); - expect(extensionExport.handler).toBeCalledTimes(2); - expect(extensionImport.handler).toBeCalled(); + expect(extensionExport.handler).toHaveBeenCalledTimes(2); + expect(extensionImport.handler).toHaveBeenCalled(); }); it('should pass extensions with the UPDATE action to the extension import command on revert, in the oldExtension folder', async () => { @@ -178,7 +178,7 @@ describe('extension clone step', () => { const step = new ExtensionCloneStep(); const result = await step.revert(state); - expect(extensionImport.handler).toBeCalledWith( + expect(extensionImport.handler).toHaveBeenCalledWith( { dir: join(state.path, 'oldExtension'), logFile: state.logFile, diff --git a/src/commands/hub/steps/index-clone-step.spec.ts b/src/commands/hub/steps/index-clone-step.spec.ts index 9c5fe847..0a3b4fbf 100644 --- a/src/commands/hub/steps/index-clone-step.spec.ts +++ b/src/commands/hub/steps/index-clone-step.spec.ts @@ -119,7 +119,7 @@ describe('index clone step', () => { ...state.from }); - expect(indexImport.handler).toBeCalledWith({ + expect(indexImport.handler).toHaveBeenCalledWith({ dir: join(state.path, 'index'), logFile: state.logFile, webhooks: true, @@ -138,8 +138,8 @@ describe('index clone step', () => { const backupFail = await step.run(state); expect(backupFail).toBeFalsy(); - expect(indexExport.handler).toBeCalledTimes(1); - expect(indexImport.handler).not.toBeCalled(); + expect(indexExport.handler).toHaveBeenCalledTimes(1); + expect(indexImport.handler).not.toHaveBeenCalled(); reset(); @@ -149,8 +149,8 @@ describe('index clone step', () => { const exportFail = await step.run(state); expect(exportFail).toBeFalsy(); - expect(indexExport.handler).toBeCalledTimes(2); - expect(indexImport.handler).not.toBeCalled(); + expect(indexExport.handler).toHaveBeenCalledTimes(2); + expect(indexImport.handler).not.toHaveBeenCalled(); reset(); @@ -160,8 +160,8 @@ describe('index clone step', () => { const importFail = await step.run(state); expect(importFail).toBeFalsy(); - expect(indexExport.handler).toBeCalledTimes(2); - expect(indexImport.handler).toBeCalled(); + expect(indexExport.handler).toHaveBeenCalledTimes(2); + 
expect(indexImport.handler).toHaveBeenCalled(); }); it('should pass indexes with the UPDATE action to the index import command on revert, in the oldIndex folder', async () => { @@ -179,7 +179,7 @@ describe('index clone step', () => { const step = new IndexCloneStep(); const result = await step.revert(state); - expect(indexImport.handler).toBeCalledWith( + expect(indexImport.handler).toHaveBeenCalledWith( { dir: join(state.path, 'oldIndex'), logFile: state.logFile, diff --git a/src/commands/hub/steps/schema-clone-step.spec.ts b/src/commands/hub/steps/schema-clone-step.spec.ts index 827458a1..39c7754e 100644 --- a/src/commands/hub/steps/schema-clone-step.spec.ts +++ b/src/commands/hub/steps/schema-clone-step.spec.ts @@ -121,7 +121,7 @@ describe('schema clone step', () => { ...state.from }); - expect(schemaImport.handler).toBeCalledWith({ + expect(schemaImport.handler).toHaveBeenCalledWith({ dir: join(state.path, 'schema'), logFile: state.logFile, ...state.to diff --git a/src/commands/hub/steps/type-clone-step.spec.ts b/src/commands/hub/steps/type-clone-step.spec.ts index f7e36661..c21b8562 100644 --- a/src/commands/hub/steps/type-clone-step.spec.ts +++ b/src/commands/hub/steps/type-clone-step.spec.ts @@ -129,7 +129,7 @@ describe('type clone step', () => { ...state.from }); - expect(typeImport.handler).toBeCalledWith({ + expect(typeImport.handler).toHaveBeenCalledWith({ dir: join(state.path, 'type'), sync: true, logFile: state.logFile, @@ -148,8 +148,8 @@ describe('type clone step', () => { const backupFail = await step.run(state); expect(backupFail).toBeFalsy(); - expect(typeExport.handler).toBeCalledTimes(1); - expect(typeImport.handler).not.toBeCalled(); + expect(typeExport.handler).toHaveBeenCalledTimes(1); + expect(typeImport.handler).not.toHaveBeenCalled(); reset(); @@ -159,8 +159,8 @@ describe('type clone step', () => { const exportFail = await step.run(state); expect(exportFail).toBeFalsy(); - expect(typeExport.handler).toBeCalledTimes(2); - expect(typeImport.handler).not.toBeCalled(); + expect(typeExport.handler).toHaveBeenCalledTimes(2); + expect(typeImport.handler).not.toHaveBeenCalled(); reset(); @@ -170,8 +170,8 @@ describe('type clone step', () => { const importFail = await step.run(state); expect(importFail).toBeFalsy(); - expect(typeExport.handler).toBeCalledTimes(2); - expect(typeImport.handler).toBeCalled(); + expect(typeExport.handler).toHaveBeenCalledTimes(2); + expect(typeImport.handler).toHaveBeenCalled(); }); it('should attempt to archive types with the CREATE action on revert, skipping archived types', async () => { @@ -193,7 +193,7 @@ describe('type clone step', () => { await step.revert(state); expect(mockContent.metrics.typesArchived).toEqual(1); - expect(typeImport.handler).not.toBeCalled(); + expect(typeImport.handler).not.toHaveBeenCalled(); }); it('should pass types with the UPDATE action to the type import command on revert, in the oldType folder', async () => { @@ -212,7 +212,7 @@ describe('type clone step', () => { const result = await step.revert(state); expect(mockContent.metrics.typesArchived).toEqual(1); - expect(typeImport.handler).toBeCalledWith( + expect(typeImport.handler).toHaveBeenCalledWith( { dir: join(state.path, 'oldType'), sync: true, diff --git a/src/common/file-log.spec.ts b/src/common/file-log.spec.ts index dac33b7e..79ce5e96 100644 --- a/src/common/file-log.spec.ts +++ b/src/common/file-log.spec.ts @@ -13,7 +13,7 @@ describe('file-log', () => { const writeSpy = jest.spyOn(log, 'writeToFile').mockImplementation(() => Promise.resolve(true)); 
await log.close(); - expect(writeSpy).toBeCalled(); + expect(writeSpy).toHaveBeenCalled(); }); it('should not create a log file when filename is null, and closed', async () => { @@ -22,7 +22,7 @@ describe('file-log', () => { const writeSpy = jest.spyOn(log, 'writeToFile').mockImplementation(() => Promise.resolve(true)); await log.close(); - expect(writeSpy).not.toBeCalled(); + expect(writeSpy).not.toHaveBeenCalled(); }); it('should embed the date in the filename', async () => { @@ -55,11 +55,11 @@ describe('file-log', () => { const writeSpy = jest.spyOn(log, 'writeToFile').mockImplementation(() => Promise.resolve(true)); await log.close(); - expect(writeSpy).not.toBeCalled(); // There is still a user, shouldn't save yet. + expect(writeSpy).not.toHaveBeenCalled(); // There is still a user, shouldn't save yet. await log.close(); - expect(writeSpy).toBeCalled(); + expect(writeSpy).toHaveBeenCalled(); }); it('should not save a log file if false is provided to the close method, and it is the last close', async () => { @@ -68,7 +68,7 @@ describe('file-log', () => { const writeSpy = jest.spyOn(log, 'writeToFile').mockImplementation(() => Promise.resolve(true)); await log.close(false); - expect(writeSpy).not.toBeCalled(); + expect(writeSpy).not.toHaveBeenCalled(); }); }); }); diff --git a/src/common/yargs/yargs-object-transformer.spec.ts b/src/common/yargs/yargs-object-transformer.spec.ts index b4cc576a..7e179cdc 100644 --- a/src/common/yargs/yargs-object-transformer.spec.ts +++ b/src/common/yargs/yargs-object-transformer.spec.ts @@ -100,7 +100,7 @@ describe('transformYargObjectToArray', () => { } }; - expect(() => transformYargObjectToArray(yargsObject)).toThrowError( + expect(() => transformYargObjectToArray(yargsObject)).toThrow( new Error('Targeted array indexes are unsupported, please provide a full array index starting at 0') ); }); @@ -126,7 +126,7 @@ describe('transformYargObjectToArray', () => { } }; - expect(() => transformYargObjectToArray(yargsObject)).toThrowError( + expect(() => transformYargObjectToArray(yargsObject)).toThrow( new Error('Targeted array indexes are unsupported, please provide a full array index starting at 0') ); }); diff --git a/src/services/export.service.spec.ts b/src/services/export.service.spec.ts index 7b88b980..f6193e82 100644 --- a/src/services/export.service.spec.ts +++ b/src/services/export.service.spec.ts @@ -63,7 +63,7 @@ describe('export service tests', () => { (fs.writeFileSync as jest.Mock).mockImplementationOnce(() => { throw new Error('Cannot write to file/directory'); }); - expect(() => writeJsonToFile('my-filename', new ContentType())).toThrowError( + expect(() => writeJsonToFile('my-filename', new ContentType())).toThrow( /^Unable to write file: my-filename, aborting export$/ ); }); diff --git a/src/services/import.service.spec.ts b/src/services/import.service.spec.ts index 79af522d..51bfad96 100644 --- a/src/services/import.service.spec.ts +++ b/src/services/import.service.spec.ts @@ -17,8 +17,6 @@ describe('loadJsonFromDirectory tests', () => { it('should throw an error if any import file is not json', (): void => { expect(() => loadJsonFromDirectory(__dirname + '/__fixtures__/load-json-from-directory/bad-json/', ContentType) - ).toThrowError( - /^Non-JSON file found: .*__fixtures__\/load-json-from-directory\/bad-json\/bad-json\.json, aborting...$/ - ); + ).toThrow(/^Non-JSON file found: .*__fixtures__\/load-json-from-directory\/bad-json\/bad-json\.json, aborting...$/); }); }); From 703f5793ba099dc32889dbb133f3f7d9973ca776 Mon Sep 17 00:00:00 
2001 From: Ben Pearey Date: Fri, 22 Aug 2025 10:59:19 +0100 Subject: [PATCH 02/43] feat: adding burstable queue --- package-lock.json | 249 ++++++++++++++++++ package.json | 1 + .../burstable-queue/burstable-queue.spec.ts | 152 +++++++++++ src/common/burstable-queue/burstable-queue.ts | 52 ++++ 4 files changed, 454 insertions(+) create mode 100644 src/common/burstable-queue/burstable-queue.spec.ts create mode 100644 src/common/burstable-queue/burstable-queue.ts diff --git a/package-lock.json b/package-lock.json index e0ba6c2a..27b6ea3c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,6 +12,7 @@ "ajv": "^6.12.3", "axios": "^1.10.0", "axios-retry": "^4.5.0", + "bottleneck": "^2.19.5", "chalk": "^2.4.2", "cli-progress": "^3.12.0", "dc-management-sdk-js": "^3.0.3", @@ -2758,6 +2759,248 @@ "@sinonjs/commons": "^3.0.1" } }, + "node_modules/@swc/core": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.13.3.tgz", + "integrity": "sha512-ZaDETVWnm6FE0fc+c2UE8MHYVS3Fe91o5vkmGfgwGXFbxYvAjKSqxM/j4cRc9T7VZNSJjriXq58XkfCp3Y6f+w==", + "dev": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "optional": true, + "peer": true, + "dependencies": { + "@swc/counter": "^0.1.3", + "@swc/types": "^0.1.23" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/swc" + }, + "optionalDependencies": { + "@swc/core-darwin-arm64": "1.13.3", + "@swc/core-darwin-x64": "1.13.3", + "@swc/core-linux-arm-gnueabihf": "1.13.3", + "@swc/core-linux-arm64-gnu": "1.13.3", + "@swc/core-linux-arm64-musl": "1.13.3", + "@swc/core-linux-x64-gnu": "1.13.3", + "@swc/core-linux-x64-musl": "1.13.3", + "@swc/core-win32-arm64-msvc": "1.13.3", + "@swc/core-win32-ia32-msvc": "1.13.3", + "@swc/core-win32-x64-msvc": "1.13.3" + }, + "peerDependencies": { + "@swc/helpers": ">=0.5.17" + }, + "peerDependenciesMeta": { + "@swc/helpers": { + "optional": true + } + } + }, + "node_modules/@swc/core-darwin-arm64": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.13.3.tgz", + "integrity": "sha512-ux0Ws4pSpBTqbDS9GlVP354MekB1DwYlbxXU3VhnDr4GBcCOimpocx62x7cFJkSpEBF8bmX8+/TTCGKh4PbyXw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "darwin" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-darwin-x64": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.13.3.tgz", + "integrity": "sha512-p0X6yhxmNUOMZrbeZ3ZNsPige8lSlSe1llllXvpCLkKKxN/k5vZt1sULoq6Nj4eQ7KeHQVm81/+AwKZyf/e0TA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "darwin" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm-gnueabihf": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.13.3.tgz", + "integrity": "sha512-OmDoiexL2fVWvQTCtoh0xHMyEkZweQAlh4dRyvl8ugqIPEVARSYtaj55TBMUJIP44mSUOJ5tytjzhn2KFxFcBA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-gnu": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.13.3.tgz", + "integrity": 
"sha512-STfKku3QfnuUj6k3g9ld4vwhtgCGYIFQmsGPPgT9MK/dI3Lwnpe5Gs5t1inoUIoGNP8sIOLlBB4HV4MmBjQuhw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-musl": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.13.3.tgz", + "integrity": "sha512-bc+CXYlFc1t8pv9yZJGus372ldzOVscBl7encUBlU1m/Sig0+NDJLz6cXXRcFyl6ABNOApWeR4Yl7iUWx6C8og==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-gnu": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.13.3.tgz", + "integrity": "sha512-dFXoa0TEhohrKcxn/54YKs1iwNeW6tUkHJgXW33H381SvjKFUV53WR231jh1sWVJETjA3vsAwxKwR23s7UCmUA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-musl": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.13.3.tgz", + "integrity": "sha512-ieyjisLB+ldexiE/yD8uomaZuZIbTc8tjquYln9Quh5ykOBY7LpJJYBWvWtm1g3pHv6AXlBI8Jay7Fffb6aLfA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-arm64-msvc": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.13.3.tgz", + "integrity": "sha512-elTQpnaX5vESSbhCEgcwXjpMsnUbqqHfEpB7ewpkAsLzKEXZaK67ihSRYAuAx6ewRQTo7DS5iTT6X5aQD3MzMw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-ia32-msvc": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.13.3.tgz", + "integrity": "sha512-nvehQVEOdI1BleJpuUgPLrclJ0TzbEMc+MarXDmmiRFwEUGqj+pnfkTSb7RZyS1puU74IXdK/YhTirHurtbI9w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-x64-msvc": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.13.3.tgz", + "integrity": "sha512-A+JSKGkRbPLVV2Kwx8TaDAV0yXIXm/gc8m98hSkVDGlPBBmydgzNdWy3X7HTUBM7IDk7YlWE7w2+RUGjdgpTmg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "peer": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "dev": true, + "license": "Apache-2.0", + "optional": true, + "peer": true + }, + "node_modules/@swc/types": { + "version": "0.1.24", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.24.tgz", + "integrity": 
"sha512-tjTMh3V4vAORHtdTprLlfoMptu1WfTZG9Rsca6yOKyNYsRr+MUXutKmliB17orgSZk5DpnDxs8GUdd/qwYxOng==", + "dev": true, + "license": "Apache-2.0", + "optional": true, + "peer": true, + "dependencies": { + "@swc/counter": "^0.1.3" + } + }, "node_modules/@tsconfig/node10": { "version": "1.0.9", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", @@ -4154,6 +4397,12 @@ "dev": true, "license": "MIT" }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==", + "license": "MIT" + }, "node_modules/brace-expansion": { "version": "1.1.12", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", diff --git a/package.json b/package.json index d1e152e7..598cf19e 100644 --- a/package.json +++ b/package.json @@ -112,6 +112,7 @@ "ajv": "^6.12.3", "axios": "^1.10.0", "axios-retry": "^4.5.0", + "bottleneck": "^2.19.5", "chalk": "^2.4.2", "cli-progress": "^3.12.0", "dc-management-sdk-js": "^3.0.3", diff --git a/src/common/burstable-queue/burstable-queue.spec.ts b/src/common/burstable-queue/burstable-queue.spec.ts new file mode 100644 index 00000000..e28470dc --- /dev/null +++ b/src/common/burstable-queue/burstable-queue.spec.ts @@ -0,0 +1,152 @@ +import { BurstableQueue } from './burstable-queue'; +import { setTimeout } from 'node:timers/promises'; + +describe('burstable-queue', () => { + it('should schedule task and execute them with an initial burst', async () => { + const interval = 500; + const burstableQueue = new BurstableQueue({ + concurrency: 1, + burstIntervalCap: 4, + sustainedIntervalCap: 1, + interval + }); + const tasks = [...Array.from({ length: 8 }).keys()]; + const completeTasks: number[] = []; + + for (const task of tasks) { + burstableQueue.add(async () => { + await setTimeout(50); + completeTasks.push(task); + }); + } + + expect(burstableQueue.size()).toEqual(8); + expect(completeTasks).toHaveLength(0); + await setTimeout(interval); + expect(burstableQueue.size()).toEqual(4); + expect(completeTasks).toHaveLength(4); + await setTimeout(interval); + expect(burstableQueue.size()).toEqual(3); + expect(completeTasks).toHaveLength(5); + await setTimeout(interval); + expect(burstableQueue.size()).toEqual(2); + expect(completeTasks).toHaveLength(6); + await setTimeout(interval); + expect(burstableQueue.size()).toEqual(1); + expect(completeTasks).toHaveLength(7); + await setTimeout(interval); + expect(burstableQueue.size()).toEqual(0); + expect(completeTasks).toHaveLength(8); + }); + + describe('add', () => { + it('should add a task to the queue', () => { + const burstableQueue = new BurstableQueue({}); + burstableQueue.add(async () => { + await setTimeout(50); + }); + expect(burstableQueue.size()).toEqual(1); + }); + }); + + describe('onIdle', () => { + it('should wait until the the queue is idle (queue is empty and all tasks executed)', async () => { + const burstableQueue = new BurstableQueue({ + concurrency: 1, + burstIntervalCap: 4, + sustainedIntervalCap: 1, + interval: 400 + }); + const tasks = [...Array.from({ length: 8 }).keys()]; + const completeTasks: number[] = []; + + for (const task of tasks) { + burstableQueue.add(async () => { + await setTimeout(50); + completeTasks.push(task); + }); + } + + await burstableQueue.onIdle(); + + expect(burstableQueue.size()).toEqual(0); + expect(completeTasks).toHaveLength(8); + }); + }); + describe('size()', () 
=> { + it('should return the size of the queue (queued and executing) - all queued', () => { + const burstableQueue = new BurstableQueue({ + concurrency: 1, + burstIntervalCap: 4, + sustainedIntervalCap: 1, + interval: 400 + }); + const tasks = [...Array.from({ length: 8 }).keys()]; + + tasks.forEach(() => { + burstableQueue.add(async () => { + await setTimeout(50); + }); + }); + + expect(burstableQueue.size()).toEqual(8); + }); + it('should return the size of the queue (queued and executing) - queue task in flight', async () => { + const burstableQueue = new BurstableQueue({ + concurrency: 1, + burstIntervalCap: 4, + sustainedIntervalCap: 1, + interval: 400 + }); + const tasks = [...Array.from({ length: 8 }).keys()]; + + tasks.forEach(() => { + burstableQueue.add(async () => { + await setTimeout(50); + }); + }); + + expect(burstableQueue.size()).toEqual(8); + await setTimeout(400); + expect(burstableQueue.size()).toEqual(4); + }); + }); + describe('pending()', () => { + it('should return the number of pending queue items (queued, not executing)', () => { + const burstableQueue = new BurstableQueue({ + concurrency: 1, + burstIntervalCap: 4, + sustainedIntervalCap: 1, + interval: 400 + }); + const tasks = [...Array.from({ length: 8 }).keys()]; + + tasks.forEach(() => { + burstableQueue.add(async () => { + await setTimeout(50); + }); + }); + + expect(burstableQueue.pending()).toEqual(8); + }); + it('should return the number of pending queue items (queued, not executing) - queue task in flight', async () => { + const burstableQueue = new BurstableQueue({ + concurrency: 1, + burstIntervalCap: 4, + sustainedIntervalCap: 1, + interval: 400 + }); + const tasks = [...Array.from({ length: 8 }).keys()]; + + tasks.forEach(() => { + burstableQueue.add(async () => { + await setTimeout(50); + }); + }); + + expect(burstableQueue.pending()).toEqual(8); + await setTimeout(400); + expect(burstableQueue.pending()).toEqual(4); + }); + }); +}); diff --git a/src/common/burstable-queue/burstable-queue.ts b/src/common/burstable-queue/burstable-queue.ts new file mode 100644 index 00000000..b9b8587f --- /dev/null +++ b/src/common/burstable-queue/burstable-queue.ts @@ -0,0 +1,52 @@ +import Bottleneck from 'bottleneck'; + +const CONCURRENCY = 4; +const INITIAL_RESERVOIR = 70; +const RESERVOIR_REFRESH_AMOUNT = 30; +const RESERVOIR_INCREASE_INTERVAL = 60_000; + +export interface BurstableQueueOptions { + concurrency?: number; + burstIntervalCap?: number; + sustainedIntervalCap?: number; + interval?: number; +} + +export class BurstableQueue { + private queue; + + constructor(options: BurstableQueueOptions) { + this.queue = new Bottleneck({ + maxConcurrent: options.concurrency || CONCURRENCY, + reservoir: options.burstIntervalCap || INITIAL_RESERVOIR, // initial value + reservoirRefreshAmount: options.sustainedIntervalCap || RESERVOIR_REFRESH_AMOUNT, + reservoirRefreshInterval: options.interval || RESERVOIR_INCREASE_INTERVAL + }); + } + + size(): number { + const { RECEIVED, QUEUED, RUNNING, EXECUTING } = this.queue.counts(); + return RECEIVED + QUEUED + RUNNING + EXECUTING; + } + + pending(): number { + const { RECEIVED, QUEUED } = this.queue.counts(); + return RECEIVED + QUEUED; + } + + async onIdle(): Promise { + if (this.size() === 0) { + return; + } + + return new Promise(resolve => { + this.queue.on('idle', () => { + resolve(); + }); + }); + } + + async add(fn: () => Promise): Promise { + return this.queue.schedule(fn); + } +} From 4c72bb49de49335fae43674063a2f453b26763f0 Mon Sep 17 00:00:00 2001 From: Ben Pearey 
Date: Fri, 22 Aug 2025 13:49:04 +0100 Subject: [PATCH 03/43] feat: adding publishing and publishing job services - wip --- .../publishing/publishing-job-service.ts | 39 +++++++++++++++++++ src/common/publishing/publishing-service.ts | 27 +++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 src/common/publishing/publishing-job-service.ts create mode 100644 src/common/publishing/publishing-service.ts diff --git a/src/common/publishing/publishing-job-service.ts b/src/common/publishing/publishing-job-service.ts new file mode 100644 index 00000000..874d569f --- /dev/null +++ b/src/common/publishing/publishing-job-service.ts @@ -0,0 +1,39 @@ +import { DynamicContent } from 'dc-management-sdk-js'; +import { BurstableQueue } from '../burstable-queue/burstable-queue'; + +export class PublishingJobService { + private client; + private queue; + private resolvedPublishJobs = []; + + constructor(client: DynamicContent) { + this.client = client; + this.queue = new BurstableQueue({}); + } + + async check(publishJob, action: () => Promise) { + this.queue.add(async () => { + const checkedPublishJob = await this.client.publishJobs.get(publishJob.id); + + if (checkedPublishJob.state === 'FAILED' || checkedPublishJob.state === 'COMPLETED') { + this.resolvedPublishJobs.push(checkedPublishJob); + } else { + // if publish has not been done then add it to the back of the queue + this.check(publishJob, action); + } + action(); + }); + } + + async onIdle() { + return this.queue.onIdle(); + } + + completeJobs() { + return resolvedPublishJobs.find(job => job.state === 'COMPLETED'); + } + + failedJobs() { + return resolvedPublishJobs.find(job => job.state === 'FAILED'); + } +} diff --git a/src/common/publishing/publishing-service.ts b/src/common/publishing/publishing-service.ts new file mode 100644 index 00000000..aeae5f5d --- /dev/null +++ b/src/common/publishing/publishing-service.ts @@ -0,0 +1,27 @@ +import { ContentItem } from 'dc-management-sdk-js'; +import { BurstableQueue } from '../burstable-queue/burstable-queue'; + +export class PublishingService { + private queue; + private _publishJobs = []; + + constructor() { + this.queue = new BurstableQueue({}); + } + + async publish(contentItem: ContentItem, action: () => void) { + this.queue.add(async () => { + const publishJob = await contentItem.related.publish(); + this._publishJobs.push(publishJob); + action(); + }); + } + + get publishJobs() { + return this._publishJobs; + } + + async onIdle() { + return this.queue.onIdle(); + } +} From 92d98e4bbb751ece9b6db90b7229b1ca45c21b00 Mon Sep 17 00:00:00 2001 From: DB Date: Fri, 29 Aug 2025 15:55:54 +0100 Subject: [PATCH 04/43] feat: first pass content item publish (not tested) wip --- src/commands/content-item/publish.ts | 45 ++++++++++-------- .../publishing/publishing-job-service.ts | 46 ++++++++++++++++--- src/common/publishing/publishing-service.ts | 2 +- 3 files changed, 67 insertions(+), 26 deletions(-) diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index 08367bc9..fe27f9bc 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -9,10 +9,12 @@ import { getDefaultLogPath, createLog } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import { withOldFilters } from '../../common/filter/facet'; import { getContent } from '../../common/filter/fetch-content'; -import { MAX_PUBLISH_RATE_LIMIT, PublishQueue } from '../../common/import/publish-queue'; +import { MAX_PUBLISH_RATE_LIMIT } from 
'../../common/import/publish-queue'; import { asyncQuestion } from '../../common/question-helpers'; import { ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; import { ContentMapping } from '../../common/content-mapping'; +import { PublishingService } from '../../common/publishing/publishing-service'; +import { PublishingJobService } from '../../common/publishing/publishing-job-service'; export const command = 'publish [id]'; @@ -188,18 +190,19 @@ export const processItems = async ({ } } - const pubQueue = new PublishQueue(argv); - log.appendLine(`Publishing ${rootContentItems.length} items.`); - if (!argv.batchPublish) { - pubQueue.maxWaiting = 1; - } + // if (!argv.batchPublish) { + // pubQueue.maxWaiting = 1; + // } + + const publishingService = new PublishingService(); for (const item of rootContentItems) { try { - await pubQueue.publish(item); - log.appendLine(`Initiating publish for "${item.label}"`); + await publishingService.publish(item, () => { + log.appendLine(`Initiating publish for "${item.label}"`); + }); } catch (e) { log.appendLine(`Failed to initiate publish for ${item.label}: ${e.toString()}`); } @@ -207,26 +210,32 @@ export const processItems = async ({ log.appendLine(`Waiting for all publish jobs to complete...`); - let keepWaiting = true; + const client = dynamicContentClientFactory(argv); + const publishingJobService = new PublishingJobService(client); - while (!pubQueue.isEmpty() && keepWaiting) { - await pubQueue.waitForAll(); + for (const publishJob of publishingService.publishJobs) { + publishingJobService.check(publishJob, async () => { + log.appendLine(`trying to retry publish ${publishJob.label}`); + }); + } - if (pubQueue.unresolvedJobs.length > 0) { + let keepWaiting = true; + while (publishingJobService.size > 0 && keepWaiting) { + if (publishingJobService.pendingSize > 0) { keepWaiting = await asyncQuestion( 'Some publishes are taking longer than expected, would you like to continue waiting? 
(Y/n)' ); } } - log.appendLine(`Finished publishing, with ${pubQueue.unresolvedJobs.length} unresolved publish jobs`); - pubQueue.unresolvedJobs.forEach(job => { - log.appendLine(` - ${job.item.label}`); + log.appendLine(`Finished publishing, with ${publishingJobService.pendingSize} unresolved publish jobs`); + publishingJobService.pendingPublishingContentItems.forEach(item => { + log.appendLine(` - ${item.label}`); }); - log.appendLine(`Finished publishing, with ${pubQueue.failedJobs.length} failed publish jobs`); - pubQueue.failedJobs.forEach(job => { - log.appendLine(` - ${job.item.label}`); + log.appendLine(`Finished publishing, with ${publishingJobService.failedJobs.length} failed publish jobs`); + publishingJobService.failedPublishingContentItems.forEach(item => { + log.appendLine(` - ${item.label}`); }); log.appendLine(`Publish complete`); diff --git a/src/common/publishing/publishing-job-service.ts b/src/common/publishing/publishing-job-service.ts index 874d569f..62d0b6e7 100644 --- a/src/common/publishing/publishing-job-service.ts +++ b/src/common/publishing/publishing-job-service.ts @@ -1,22 +1,38 @@ -import { DynamicContent } from 'dc-management-sdk-js'; +import { ContentItem, DynamicContent, PublishingJob } from 'dc-management-sdk-js'; import { BurstableQueue } from '../burstable-queue/burstable-queue'; +import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingStatus'; export class PublishingJobService { private client; private queue; - private resolvedPublishJobs = []; + private resolvedPublishJobs: PublishingJob[] = []; + private pendingContentItems: ContentItem[] = []; + private failedContentItems: ContentItem[] = []; constructor(client: DynamicContent) { this.client = client; this.queue = new BurstableQueue({}); } - async check(publishJob, action: () => Promise) { + async check(publishJob: ContentItem, action: () => Promise) { this.queue.add(async () => { - const checkedPublishJob = await this.client.publishJobs.get(publishJob.id); + this.pendingContentItems.push(publishJob); + const checkedPublishJob = await this.client.publishingJobs.get(publishJob.id); - if (checkedPublishJob.state === 'FAILED' || checkedPublishJob.state === 'COMPLETED') { + if (checkedPublishJob.state === PublishingJobStatus.FAILED) { + this.failedContentItems.push(publishJob); + } + + if ( + checkedPublishJob.state === PublishingJobStatus.FAILED || + checkedPublishJob.state === PublishingJobStatus.COMPLETED + ) { this.resolvedPublishJobs.push(checkedPublishJob); + + const index = this.pendingContentItems.indexOf(publishJob); + if (index > -1) { + this.pendingContentItems.splice(index, 1); + } } else { // if publish has not been done then add it to the back of the queue this.check(publishJob, action); @@ -30,10 +46,26 @@ export class PublishingJobService { } completeJobs() { - return resolvedPublishJobs.find(job => job.state === 'COMPLETED'); + return this.resolvedPublishJobs.find(job => job.state === 'COMPLETED'); } failedJobs() { - return resolvedPublishJobs.find(job => job.state === 'FAILED'); + return this.resolvedPublishJobs.find(job => job.state === 'FAILED'); + } + + get size() { + return this.queue.size(); + } + + get pendingSize() { + return this.queue.pending(); + } + + get pendingPublishingContentItems() { + return this.pendingContentItems; + } + + get failedPublishingContentItems() { + return this.failedContentItems; } } diff --git a/src/common/publishing/publishing-service.ts b/src/common/publishing/publishing-service.ts index aeae5f5d..1a017d02 100644 --- 
a/src/common/publishing/publishing-service.ts +++ b/src/common/publishing/publishing-service.ts @@ -3,7 +3,7 @@ import { BurstableQueue } from '../burstable-queue/burstable-queue'; export class PublishingService { private queue; - private _publishJobs = []; + private _publishJobs: ContentItem[] = []; constructor() { this.queue = new BurstableQueue({}); From 228f4d925634f8c0e4a082fb54d8530c283044ab Mon Sep 17 00:00:00 2001 From: DB Date: Mon, 1 Sep 2025 17:10:01 +0100 Subject: [PATCH 05/43] refactor: update options, command options, modify tests --- src/commands/content-item/publish.spec.ts | 40 +++++++++++-------- src/commands/content-item/publish.ts | 23 ++++++----- src/common/burstable-queue/burstable-queue.ts | 8 ++-- src/common/publish/publish-options.ts | 1 + src/common/publishing/publishing-service.ts | 6 +-- 5 files changed, 44 insertions(+), 34 deletions(-) diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index 31e75fd8..f749b758 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -1,7 +1,7 @@ import { builder, handler, getContentItems, processItems, LOG_FILENAME, coerceLog } from './publish'; import { Status, ContentItem, DynamicContent, Hub } from 'dc-management-sdk-js'; import { FileLog } from '../../common/file-log'; -import * as publish from '../../common/import/publish-queue'; +import * as publishingService from '../../common/publishing/publishing-service'; import { Arguments } from 'yargs'; import { ConfigurationParameters } from '../configure'; import PublishOptions from '../../common/publish/publish-options'; @@ -11,7 +11,6 @@ jest.mock('../../services/dynamic-content-client-factory'); jest.mock('../../common/content-item/confirm-all-content'); jest.mock('../../common/log-helpers'); jest.mock('../../common/filter/fetch-content'); -jest.mock('../../common/import/publish-queue'); const mockClient = { contentItems: { @@ -39,10 +38,6 @@ const argv: Arguments = { } as Arguments; describe('publish tests', () => { - afterEach((): void => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (publish as any).publishCalls = []; - }); describe('builder tests', () => { it('should configure yargs', function () { const argv = Yargs(process.argv.slice(2)); @@ -75,12 +70,6 @@ describe('publish tests', () => { "Publish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." }); - expect(spyOption).toHaveBeenCalledWith('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. 
(35/min)' - }); - expect(spyOption).toHaveBeenCalledWith('f', { type: 'boolean', boolean: true, @@ -154,7 +143,27 @@ describe('publish tests', () => { }); describe('processItems tests', () => { - beforeEach(() => jest.clearAllMocks()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let publishCalls: any[]; + + beforeEach(() => { + jest.clearAllMocks(); + + publishCalls = []; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (publishingService as any).PublishingService = jest.fn().mockImplementation(() => ({ + publish: jest.fn(async (item, action) => { + publishCalls.push(item); + action(); + }), + publishJobs: [], + onIdle: jest.fn().mockResolvedValue(undefined) + })); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (publishingService as any).publishCalls = publishCalls; + }); it('should exit early if no content items', async () => { console.log = jest.fn(); @@ -201,8 +210,7 @@ describe('publish tests', () => { argv }); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - expect((publish as any).publishCalls.length).toEqual(1); + expect(publishCalls.length).toBe(1); }); it('should process all items while filtering out any dependencies and call publish', async () => { @@ -235,7 +243,7 @@ describe('publish tests', () => { }); // eslint-disable-next-line @typescript-eslint/no-explicit-any - expect((publish as any).publishCalls.length).toEqual(1); + expect(publishCalls.length).toBe(1); }); }); diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index fe27f9bc..e66546fe 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -9,12 +9,12 @@ import { getDefaultLogPath, createLog } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import { withOldFilters } from '../../common/filter/facet'; import { getContent } from '../../common/filter/fetch-content'; -import { MAX_PUBLISH_RATE_LIMIT } from '../../common/import/publish-queue'; import { asyncQuestion } from '../../common/question-helpers'; import { ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; import { ContentMapping } from '../../common/content-mapping'; import { PublishingService } from '../../common/publishing/publishing-service'; import { PublishingJobService } from '../../common/publishing/publishing-job-service'; +import { BurstableQueueOptions, RESERVOIR_REFRESH_AMOUNT } from '../../common/burstable-queue/burstable-queue'; export const command = 'publish [id]'; @@ -47,14 +47,10 @@ export const builder = (yargs: Argv): void => { describe: "Publish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." }) - .option('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. 
(35/min)' - }) + .alias('publishRateLimit', 'publishRateLimit') .options('publishRateLimit', { type: 'number', - describe: `Set the number of publishes per minute (max = ${MAX_PUBLISH_RATE_LIMIT})` + describe: `Set the number of publishes per minute (max = ${RESERVOIR_REFRESH_AMOUNT})` }) .alias('f', 'force') .option('f', { @@ -192,11 +188,14 @@ export const processItems = async ({ log.appendLine(`Publishing ${rootContentItems.length} items.`); - // if (!argv.batchPublish) { - // pubQueue.maxWaiting = 1; - // } + const options: BurstableQueueOptions = {}; - const publishingService = new PublishingService(); + if (argv.publishRateLimit) { + const rate: number = parseInt(argv.publishRateLimit.toString()); + options.sustainedIntervalCap = rate; + } + + const publishingService = new PublishingService(options); for (const item of rootContentItems) { try { @@ -228,6 +227,8 @@ export const processItems = async ({ } } + await publishingService.onIdle(); + log.appendLine(`Finished publishing, with ${publishingJobService.pendingSize} unresolved publish jobs`); publishingJobService.pendingPublishingContentItems.forEach(item => { log.appendLine(` - ${item.label}`); diff --git a/src/common/burstable-queue/burstable-queue.ts b/src/common/burstable-queue/burstable-queue.ts index b9b8587f..76171eba 100644 --- a/src/common/burstable-queue/burstable-queue.ts +++ b/src/common/burstable-queue/burstable-queue.ts @@ -1,9 +1,9 @@ import Bottleneck from 'bottleneck'; -const CONCURRENCY = 4; -const INITIAL_RESERVOIR = 70; -const RESERVOIR_REFRESH_AMOUNT = 30; -const RESERVOIR_INCREASE_INTERVAL = 60_000; +export const CONCURRENCY = 4; +export const INITIAL_RESERVOIR = 70; +export const RESERVOIR_REFRESH_AMOUNT = 30; +export const RESERVOIR_INCREASE_INTERVAL = 60_000; export interface BurstableQueueOptions { concurrency?: number; diff --git a/src/common/publish/publish-options.ts b/src/common/publish/publish-options.ts index e8f6ffcc..83371b65 100644 --- a/src/common/publish/publish-options.ts +++ b/src/common/publish/publish-options.ts @@ -8,4 +8,5 @@ export default interface PublishOptions { logFile: FileLog; force?: boolean; silent?: boolean; + publishRateLimit?: number; } diff --git a/src/common/publishing/publishing-service.ts b/src/common/publishing/publishing-service.ts index 1a017d02..be9efaec 100644 --- a/src/common/publishing/publishing-service.ts +++ b/src/common/publishing/publishing-service.ts @@ -1,12 +1,12 @@ import { ContentItem } from 'dc-management-sdk-js'; -import { BurstableQueue } from '../burstable-queue/burstable-queue'; +import { BurstableQueue, BurstableQueueOptions } from '../burstable-queue/burstable-queue'; export class PublishingService { private queue; private _publishJobs: ContentItem[] = []; - constructor() { - this.queue = new BurstableQueue({}); + constructor(options: BurstableQueueOptions) { + this.queue = new BurstableQueue(options); } async publish(contentItem: ContentItem, action: () => void) { From 070d85427514855b5e5236cf887b7a41d9a88bca Mon Sep 17 00:00:00 2001 From: DB Date: Tue, 2 Sep 2025 12:58:52 +0100 Subject: [PATCH 06/43] feat: content item import publish to use new services --- src/commands/content-item/import.spec.ts | 34 +++++++++----- src/commands/content-item/import.ts | 60 ++++++++++++++---------- 2 files changed, 57 insertions(+), 37 deletions(-) diff --git a/src/commands/content-item/import.spec.ts b/src/commands/content-item/import.spec.ts index 9a084a79..5df09786 100644 --- a/src/commands/content-item/import.spec.ts +++ 
b/src/commands/content-item/import.spec.ts @@ -1,7 +1,7 @@ import { builder, command, handler, LOG_FILENAME, getDefaultMappingPath } from './import'; import { dependsOn, dependantType } from './__mocks__/dependant-content-helper'; import * as reverter from './import-revert'; -import * as publish from '../../common/import/publish-queue'; +import * as publishingService from '../../common/publishing/publishing-service'; import { createLog, getDefaultLogPath } from '../../common/log-helpers'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { Folder, ContentType } from 'dc-management-sdk-js'; @@ -20,7 +20,6 @@ import { MediaRewriter } from '../../common/media/media-rewriter'; jest.mock('readline'); jest.mock('./import-revert'); jest.mock('../../services/dynamic-content-client-factory'); -jest.mock('../../common/import/publish-queue'); jest.mock('../../common/media/media-rewriter'); jest.mock('../../common/log-helpers', () => ({ ...jest.requireActual('../../common/log-helpers'), @@ -111,12 +110,6 @@ describe('content-item import command', () => { 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }); - expect(spyOption).toHaveBeenCalledWith('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. (35/min)' - }); - expect(spyOption).toHaveBeenCalledWith('republish', { type: 'boolean', boolean: true, @@ -167,12 +160,28 @@ describe('content-item import command', () => { revertLog: Promise.resolve(undefined) }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let publishCalls: any[]; + beforeEach(async () => { + jest.clearAllMocks(); jest.mock('readline'); jest.mock('../../services/dynamic-content-client-factory'); + + publishCalls = []; + // eslint-disable-next-line @typescript-eslint/no-explicit-any - const calls = (publish as any).publishCalls; - calls.splice(0, calls.length); + (publishingService as any).PublishingService = jest.fn().mockImplementation(() => ({ + publish: jest.fn(async (item, action) => { + publishCalls.push(item); + action(); + }), + publishJobs: [], + onIdle: jest.fn().mockResolvedValue(undefined) + })); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (publishingService as any).publishCalls = publishCalls; }); beforeAll(async () => { @@ -1181,8 +1190,9 @@ describe('content-item import command', () => { const matches = await mockContent.filterMatch(templates, '', false); expect(matches.length).toEqual(templates.length); + // eslint-disable-next-line @typescript-eslint/no-explicit-any - expect((publish as any).publishCalls.length).toEqual(2); + expect(publishCalls.length).toEqual(2); await rimraf(`temp_${process.env.JEST_WORKER_ID}/import/publish/`); }); @@ -1239,7 +1249,7 @@ describe('content-item import command', () => { expect(mockContent.metrics.itemsUpdated).toEqual(3); // eslint-disable-next-line @typescript-eslint/no-explicit-any - expect((publish as any).publishCalls.length).toEqual(1); // One of the circular dependancies will be published. + expect(publishCalls.length).toEqual(1); // One of the circular dependancies will be published. 
const matches = await mockContent.filterMatch(templates, '', false); diff --git a/src/commands/content-item/import.ts b/src/commands/content-item/import.ts index 8afb30a9..a9b7b32d 100644 --- a/src/commands/content-item/import.ts +++ b/src/commands/content-item/import.ts @@ -31,9 +31,12 @@ import { Body } from '../../common/content-item/body'; import { AmplienceSchemaValidator, defaultSchemaLookup } from '../../common/content-item/amplience-schema-validator'; import { createLog, getDefaultLogPath } from '../../common/log-helpers'; import { asyncQuestion } from '../../common/question-helpers'; -import { MAX_PUBLISH_RATE_LIMIT, PublishQueue } from '../../common/import/publish-queue'; import { MediaRewriter } from '../../common/media/media-rewriter'; import { progressBar } from '../../common/progress-bar/progress-bar'; +import { BurstableQueueOptions, RESERVOIR_REFRESH_AMOUNT } from '../../common/burstable-queue/burstable-queue'; +import { PublishingService } from '../../common/publishing/publishing-service'; +import { PublishingJobService } from '../../common/publishing/publishing-job-service'; +import PublishOptions from '../../common/publish/publish-options'; export function getDefaultMappingPath(name: string, platform: string = process.platform): string { return join( @@ -106,12 +109,6 @@ export const builder = (yargs: Argv): void => { 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }) - .option('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. (35/min)' - }) - .option('republish', { type: 'boolean', boolean: true, @@ -120,7 +117,7 @@ export const builder = (yargs: Argv): void => { .options('publishRateLimit', { type: 'number', - describe: `Set the number of publishes per minute (max = ${MAX_PUBLISH_RATE_LIMIT})` + describe: `Set the number of publishes per minute (max = ${RESERVOIR_REFRESH_AMOUNT})` }) .option('excludeKeys', { @@ -890,46 +887,59 @@ const importTree = async ( } if (argv.publish) { - const pubQueue = new PublishQueue(argv); + const options: BurstableQueueOptions = {}; + log.appendLine(`Publishing ${publishable.length} items. 
(${publishChildren} children included)`); - if (!argv.batchPublish) { - pubQueue.maxWaiting = 1; + if (argv.publishRateLimit) { + const rate: number = parseInt(argv.publishRateLimit.toString()); + options.sustainedIntervalCap = rate; } + const publishingService = new PublishingService(options); + for (let i = 0; i < publishable.length; i++) { const item = publishable[i].item; try { - await pubQueue.publish(item); - log.appendLine(`Started publish for ${item.label}.`); + await publishingService.publish(item, () => { + log.appendLine(`Initiating publish for "${item.label}"`); + }); } catch (e) { log.appendLine(`Failed to initiate publish for ${item.label}: ${e.toString()}`); } } - log.appendLine(`Waiting for all publishes to complete...`); + log.appendLine(`Waiting for all publish jobs to complete...`); - let keepWaiting = true; + const client = dynamicContentClientFactory(argv); + const publishingJobService = new PublishingJobService(client); - while (!pubQueue.isEmpty() && keepWaiting) { - await pubQueue.waitForAll(); + for (const publishJob of publishingService.publishJobs) { + publishingJobService.check(publishJob, async () => { + log.appendLine(`trying to retry publish ${publishJob.label}`); + }); + } - if (pubQueue.unresolvedJobs.length > 0) { + let keepWaiting = true; + while (publishingJobService.size > 0 && keepWaiting) { + if (publishingJobService.pendingSize > 0) { keepWaiting = await asyncQuestion( 'Some publishes are taking longer than expected, would you like to continue waiting? (Y/n)' ); } } - log.appendLine(`Finished publishing, with ${pubQueue.unresolvedJobs.length} unresolved publishes`); - pubQueue.unresolvedJobs.forEach(job => { - log.appendLine(` - ${job.item.label}`); + await publishingService.onIdle(); + + log.appendLine(`Finished publishing, with ${publishingJobService.pendingSize} unresolved publish jobs`); + publishingJobService.pendingPublishingContentItems.forEach(item => { + log.appendLine(` - ${item.label}`); }); - log.appendLine(`Finished publishing, with ${pubQueue.failedJobs.length} failed publishes total`); - pubQueue.failedJobs.forEach(job => { - log.appendLine(` - ${job.item.label}`); + log.appendLine(`Finished publishing, with ${publishingJobService.failedJobs.length} failed publish jobs`); + publishingJobService.failedPublishingContentItems.forEach(item => { + log.appendLine(` - ${item.label}`); }); } @@ -938,7 +948,7 @@ const importTree = async ( }; export const handler = async ( - argv: Arguments + argv: Arguments ): Promise => { if (await argv.revertLog) { return revert(argv); From 19084d9e1170ceda7d0c0aeb865915a844ff38f1 Mon Sep 17 00:00:00 2001 From: DB Date: Fri, 5 Sep 2025 12:54:16 +0100 Subject: [PATCH 07/43] refactor: update return types and publishing service actions - wip --- src/commands/content-item/import.spec.ts | 2 +- src/commands/content-item/import.ts | 72 ++++++----------- src/commands/content-item/publish.spec.ts | 42 ++++++---- src/commands/content-item/publish.ts | 81 +++++++------------ .../content-item-publishing-job-service.ts | 41 ++++++++++ .../content-item-publishing-service.ts | 28 +++++++ .../publishing/publishing-job-service.ts | 71 ---------------- src/common/publishing/publishing-service.ts | 27 ------- 8 files changed, 152 insertions(+), 212 deletions(-) create mode 100644 src/common/publishing/content-item-publishing-job-service.ts create mode 100644 src/common/publishing/content-item-publishing-service.ts delete mode 100644 src/common/publishing/publishing-job-service.ts delete mode 100644 
src/common/publishing/publishing-service.ts diff --git a/src/commands/content-item/import.spec.ts b/src/commands/content-item/import.spec.ts index 5df09786..1e142fef 100644 --- a/src/commands/content-item/import.spec.ts +++ b/src/commands/content-item/import.spec.ts @@ -1,7 +1,7 @@ import { builder, command, handler, LOG_FILENAME, getDefaultMappingPath } from './import'; import { dependsOn, dependantType } from './__mocks__/dependant-content-helper'; import * as reverter from './import-revert'; -import * as publishingService from '../../common/publishing/publishing-service'; +import * as publishingService from '../../common/publishing/content-item-publishing-service'; import { createLog, getDefaultLogPath } from '../../common/log-helpers'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { Folder, ContentType } from 'dc-management-sdk-js'; diff --git a/src/commands/content-item/import.ts b/src/commands/content-item/import.ts index a9b7b32d..e8183b8e 100644 --- a/src/commands/content-item/import.ts +++ b/src/commands/content-item/import.ts @@ -17,7 +17,8 @@ import { ContentRepository, ContentType, ContentTypeSchema, - Status + Status, + PublishingJob } from 'dc-management-sdk-js'; import { ContentMapping } from '../../common/content-mapping'; import { @@ -33,10 +34,10 @@ import { createLog, getDefaultLogPath } from '../../common/log-helpers'; import { asyncQuestion } from '../../common/question-helpers'; import { MediaRewriter } from '../../common/media/media-rewriter'; import { progressBar } from '../../common/progress-bar/progress-bar'; -import { BurstableQueueOptions, RESERVOIR_REFRESH_AMOUNT } from '../../common/burstable-queue/burstable-queue'; -import { PublishingService } from '../../common/publishing/publishing-service'; -import { PublishingJobService } from '../../common/publishing/publishing-job-service'; +import { ContentItemPublishingService } from '../../common/publishing/content-item-publishing-service'; import PublishOptions from '../../common/publish/publish-options'; +import { ContentItemPublishingJobService } from '../../common/publishing/content-item-publishing-job-service'; +import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; export function getDefaultMappingPath(name: string, platform: string = process.platform): string { return join( @@ -115,11 +116,6 @@ export const builder = (yargs: Argv): void => { describe: 'Republish content items regardless of whether the import changed them or not. (--publish not required)' }) - .options('publishRateLimit', { - type: 'number', - describe: `Set the number of publishes per minute (max = ${RESERVOIR_REFRESH_AMOUNT})` - }) - .option('excludeKeys', { type: 'boolean', boolean: true, @@ -887,63 +883,47 @@ const importTree = async ( } if (argv.publish) { - const options: BurstableQueueOptions = {}; - log.appendLine(`Publishing ${publishable.length} items. 
(${publishChildren} children included)`); - if (argv.publishRateLimit) { - const rate: number = parseInt(argv.publishRateLimit.toString()); - options.sustainedIntervalCap = rate; - } - - const publishingService = new PublishingService(options); + const publishingService = new ContentItemPublishingService(); + const contentItemPublishJobs: [ContentItem, PublishingJob][] = []; for (let i = 0; i < publishable.length; i++) { const item = publishable[i].item; try { - await publishingService.publish(item, () => { - log.appendLine(`Initiating publish for "${item.label}"`); + await publishingService.publish(item, (contentItem, publishingJob) => { + contentItemPublishJobs.push([contentItem, publishingJob]); + + log.appendLine(`Initiated publish for "${item.label}"`); }); } catch (e) { log.appendLine(`Failed to initiate publish for ${item.label}: ${e.toString()}`); } } - log.appendLine(`Waiting for all publish jobs to complete...`); + await publishingService.onIdle(); - const client = dynamicContentClientFactory(argv); - const publishingJobService = new PublishingJobService(client); + const checkPublishJobs = await asyncQuestion( + 'All publishes have been requested, would you like to wait for all publishes to complete? (Y/n)' + ); - for (const publishJob of publishingService.publishJobs) { - publishingJobService.check(publishJob, async () => { - log.appendLine(`trying to retry publish ${publishJob.label}`); - }); - } + if (checkPublishJobs) { + const publishingJobService = new ContentItemPublishingJobService(client); - let keepWaiting = true; - while (publishingJobService.size > 0 && keepWaiting) { - if (publishingJobService.pendingSize > 0) { - keepWaiting = await asyncQuestion( - 'Some publishes are taking longer than expected, would you like to continue waiting? 
(Y/n)' - ); + for (const [contentItem, publishingJob] of contentItemPublishJobs) { + publishingJobService.check(publishingJob, async resolvedPublishingJob => { + if (resolvedPublishingJob.state === PublishingJobStatus.FAILED) { + log.appendLine(`Failed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); + } + }); } - } - - await publishingService.onIdle(); - - log.appendLine(`Finished publishing, with ${publishingJobService.pendingSize} unresolved publish jobs`); - publishingJobService.pendingPublishingContentItems.forEach(item => { - log.appendLine(` - ${item.label}`); - }); - log.appendLine(`Finished publishing, with ${publishingJobService.failedJobs.length} failed publish jobs`); - publishingJobService.failedPublishingContentItems.forEach(item => { - log.appendLine(` - ${item.label}`); - }); + await publishingJobService.onIdle(); + } } - log.appendLine('Done!'); + log.appendLine('Publishing complete'); return true; }; diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index f749b758..f88e59ca 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -1,16 +1,18 @@ import { builder, handler, getContentItems, processItems, LOG_FILENAME, coerceLog } from './publish'; -import { Status, ContentItem, DynamicContent, Hub } from 'dc-management-sdk-js'; +import { Status, ContentItem, DynamicContent, Hub, PublishingJobLocation, PublishingJob } from 'dc-management-sdk-js'; import { FileLog } from '../../common/file-log'; -import * as publishingService from '../../common/publishing/publishing-service'; +import * as publishingService from '../../common/publishing/content-item-publishing-service'; import { Arguments } from 'yargs'; import { ConfigurationParameters } from '../configure'; import PublishOptions from '../../common/publish/publish-options'; import Yargs from 'yargs/yargs'; +import readline from 'readline'; jest.mock('../../services/dynamic-content-client-factory'); jest.mock('../../common/content-item/confirm-all-content'); jest.mock('../../common/log-helpers'); jest.mock('../../common/filter/fetch-content'); +jest.mock('readline'); const mockClient = { contentItems: { @@ -28,15 +30,6 @@ const mockLog = { }) } as unknown as FileLog; -const argv: Arguments = { - $0: '', - _: [], - clientId: 'client-id', - clientSecret: 'client-secret', - hubId: 'hub-id', - batchPublish: false -} as Arguments; - describe('publish tests', () => { describe('builder tests', () => { it('should configure yargs', function () { @@ -173,7 +166,7 @@ describe('publish tests', () => { logFile: mockLog, allContent: false, missingContent: false, - argv + client: mockClient }); expect(console.log).toHaveBeenCalledWith('Nothing found to publish, aborting.'); @@ -191,7 +184,7 @@ describe('publish tests', () => { logFile: mockLog, allContent: false, missingContent: false, - argv + client: mockClient }); expect(confirmAllContent).toHaveBeenCalled(); @@ -200,6 +193,22 @@ describe('publish tests', () => { it('should process all items and call publish', async () => { const contentItem = new ContentItem({ id: '1', label: 'Publish Me', body: { _meta: {} } }); + const mockedPublishingJob = jest.fn(); + const mockedPublish = jest.fn(); + + const publishingJobLocation = new PublishingJobLocation({ + location: 'https://api.amplience.net/v2/content/publishing-jobs/68adcb6c1ad05f3b50ebc821' + }); + + publishingJobLocation.related.publishingJob = mockedPublishingJob.mockResolvedValue(new PublishingJob()); + + 
contentItem.related.publish = mockedPublish.mockResolvedValue(publishingJobLocation); + + mockClient.contentItems.get = jest.fn().mockResolvedValue(contentItem); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (readline as any).setResponses(['Y']); + await processItems({ contentItems: [contentItem], force: true, @@ -207,10 +216,11 @@ describe('publish tests', () => { logFile: mockLog, allContent: false, missingContent: false, - argv + client: mockClient }); - expect(publishCalls.length).toBe(1); + expect(mockedPublish).toHaveBeenCalledTimes(1); + expect(mockedPublishingJob).toHaveBeenCalledTimes(1); }); it('should process all items while filtering out any dependencies and call publish', async () => { @@ -239,7 +249,7 @@ describe('publish tests', () => { logFile: mockLog, allContent: false, missingContent: false, - argv + client: mockClient }); // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index e66546fe..09fff26b 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -4,7 +4,7 @@ import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import PublishOptions from '../../common/publish/publish-options'; -import { ContentItem, ContentRepository, DynamicContent, Status } from 'dc-management-sdk-js'; +import { ContentItem, ContentRepository, DynamicContent, PublishingJob, Status } from 'dc-management-sdk-js'; import { getDefaultLogPath, createLog } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import { withOldFilters } from '../../common/filter/facet'; @@ -12,9 +12,9 @@ import { getContent } from '../../common/filter/fetch-content'; import { asyncQuestion } from '../../common/question-helpers'; import { ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; import { ContentMapping } from '../../common/content-mapping'; -import { PublishingService } from '../../common/publishing/publishing-service'; -import { PublishingJobService } from '../../common/publishing/publishing-job-service'; -import { BurstableQueueOptions, RESERVOIR_REFRESH_AMOUNT } from '../../common/burstable-queue/burstable-queue'; +import { ContentItemPublishingService } from '../../common/publishing/content-item-publishing-service'; +import { ContentItemPublishingJobService } from '../../common/publishing/content-item-publishing-job-service'; +import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; export const command = 'publish [id]'; @@ -47,11 +47,6 @@ export const builder = (yargs: Argv): void => { describe: "Publish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." 
}) - .alias('publishRateLimit', 'publishRateLimit') - .options('publishRateLimit', { - type: 'number', - describe: `Set the number of publishes per minute (max = ${RESERVOIR_REFRESH_AMOUNT})` - }) .alias('f', 'force') .option('f', { type: 'boolean', @@ -130,21 +125,21 @@ export const getContentItems = async ({ }; export const processItems = async ({ + client, contentItems, force, silent, logFile, allContent, - missingContent, - argv + missingContent }: { + client: DynamicContent; contentItems: ContentItem[]; force?: boolean; silent?: boolean; logFile: FileLog; allContent: boolean; missingContent: boolean; - argv: Arguments; }): Promise => { if (contentItems.length == 0) { console.log('Nothing found to publish, aborting.'); @@ -188,58 +183,42 @@ export const processItems = async ({ log.appendLine(`Publishing ${rootContentItems.length} items.`); - const options: BurstableQueueOptions = {}; - - if (argv.publishRateLimit) { - const rate: number = parseInt(argv.publishRateLimit.toString()); - options.sustainedIntervalCap = rate; - } - - const publishingService = new PublishingService(options); + const publishingService = new ContentItemPublishingService(); + const contentItemPublishJobs: [ContentItem, PublishingJob][] = []; for (const item of rootContentItems) { try { - await publishingService.publish(item, () => { - log.appendLine(`Initiating publish for "${item.label}"`); + await publishingService.publish(item, (contentItem, publishingJob) => { + contentItemPublishJobs.push([contentItem, publishingJob]); + + log.appendLine(`Initiated publish for "${item.label}"`); }); } catch (e) { log.appendLine(`Failed to initiate publish for ${item.label}: ${e.toString()}`); } } - log.appendLine(`Waiting for all publish jobs to complete...`); + await publishingService.onIdle(); - const client = dynamicContentClientFactory(argv); - const publishingJobService = new PublishingJobService(client); + const checkPublishJobs = await asyncQuestion( + 'All publishes have been requested, would you like to wait for all publishes to complete? (Y/n)' + ); - for (const publishJob of publishingService.publishJobs) { - publishingJobService.check(publishJob, async () => { - log.appendLine(`trying to retry publish ${publishJob.label}`); - }); - } + if (checkPublishJobs) { + const publishingJobService = new ContentItemPublishingJobService(client); - let keepWaiting = true; - while (publishingJobService.size > 0 && keepWaiting) { - if (publishingJobService.pendingSize > 0) { - keepWaiting = await asyncQuestion( - 'Some publishes are taking longer than expected, would you like to continue waiting? 
(Y/n)' - ); + for (const [contentItem, publishingJob] of contentItemPublishJobs) { + publishingJobService.check(publishingJob, async resolvedPublishingJob => { + if (resolvedPublishingJob.state === PublishingJobStatus.FAILED) { + log.appendLine(`Failed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); + } + }); } - } - - await publishingService.onIdle(); - log.appendLine(`Finished publishing, with ${publishingJobService.pendingSize} unresolved publish jobs`); - publishingJobService.pendingPublishingContentItems.forEach(item => { - log.appendLine(` - ${item.label}`); - }); - - log.appendLine(`Finished publishing, with ${publishingJobService.failedJobs.length} failed publish jobs`); - publishingJobService.failedPublishingContentItems.forEach(item => { - log.appendLine(` - ${item.label}`); - }); + await publishingJobService.onIdle(); + } - log.appendLine(`Publish complete`); + log.appendLine(`Publishing complete`); await log.close(!silent); }; @@ -279,12 +258,12 @@ export const handler = async (argv: Arguments Promise) { + this.queue.add(async () => { + const latestPublishJob = await this.client.publishingJob.get(publishingJob.id); + + if ( + latestPublishJob.state === PublishingJobStatus.FAILED || + latestPublishJob.state === PublishingJobStatus.COMPLETED + ) { + action(latestPublishJob); + } else { + // if publish has not been done then add it to the back of the queue + this.check(latestPublishJob, action); + } + }); + } + + async onIdle() { + return this.queue.onIdle(); + } + + get size() { + return this.queue.size(); + } + + get pending() { + return this.queue.pending(); + } +} diff --git a/src/common/publishing/content-item-publishing-service.ts b/src/common/publishing/content-item-publishing-service.ts new file mode 100644 index 00000000..ef93c930 --- /dev/null +++ b/src/common/publishing/content-item-publishing-service.ts @@ -0,0 +1,28 @@ +import { ContentItem, PublishingJob } from 'dc-management-sdk-js'; +import { BurstableQueue } from '../burstable-queue/burstable-queue'; + +export class ContentItemPublishingService { + private queue; + private _publishJobs: PublishingJob[] = []; + + constructor() { + this.queue = new BurstableQueue({}); + } + + async publish(contentItem: ContentItem, action: (contentItem: ContentItem, publishJob: PublishingJob) => void) { + this.queue.add(async () => { + const publishJobLocation = await contentItem.related.publish(); + const publishJob = await publishJobLocation.related.publishingJob(); + this._publishJobs.push(publishJob); + action(contentItem, publishJob); + }); + } + + get publishJobs() { + return this._publishJobs; + } + + async onIdle() { + return this.queue.onIdle(); + } +} diff --git a/src/common/publishing/publishing-job-service.ts b/src/common/publishing/publishing-job-service.ts deleted file mode 100644 index 62d0b6e7..00000000 --- a/src/common/publishing/publishing-job-service.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { ContentItem, DynamicContent, PublishingJob } from 'dc-management-sdk-js'; -import { BurstableQueue } from '../burstable-queue/burstable-queue'; -import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingStatus'; - -export class PublishingJobService { - private client; - private queue; - private resolvedPublishJobs: PublishingJob[] = []; - private pendingContentItems: ContentItem[] = []; - private failedContentItems: ContentItem[] = []; - - constructor(client: DynamicContent) { - this.client = client; - this.queue = new BurstableQueue({}); - } - - async 
check(publishJob: ContentItem, action: () => Promise) { - this.queue.add(async () => { - this.pendingContentItems.push(publishJob); - const checkedPublishJob = await this.client.publishingJobs.get(publishJob.id); - - if (checkedPublishJob.state === PublishingJobStatus.FAILED) { - this.failedContentItems.push(publishJob); - } - - if ( - checkedPublishJob.state === PublishingJobStatus.FAILED || - checkedPublishJob.state === PublishingJobStatus.COMPLETED - ) { - this.resolvedPublishJobs.push(checkedPublishJob); - - const index = this.pendingContentItems.indexOf(publishJob); - if (index > -1) { - this.pendingContentItems.splice(index, 1); - } - } else { - // if publish has not been done then add it to the back of the queue - this.check(publishJob, action); - } - action(); - }); - } - - async onIdle() { - return this.queue.onIdle(); - } - - completeJobs() { - return this.resolvedPublishJobs.find(job => job.state === 'COMPLETED'); - } - - failedJobs() { - return this.resolvedPublishJobs.find(job => job.state === 'FAILED'); - } - - get size() { - return this.queue.size(); - } - - get pendingSize() { - return this.queue.pending(); - } - - get pendingPublishingContentItems() { - return this.pendingContentItems; - } - - get failedPublishingContentItems() { - return this.failedContentItems; - } -} diff --git a/src/common/publishing/publishing-service.ts b/src/common/publishing/publishing-service.ts deleted file mode 100644 index be9efaec..00000000 --- a/src/common/publishing/publishing-service.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { ContentItem } from 'dc-management-sdk-js'; -import { BurstableQueue, BurstableQueueOptions } from '../burstable-queue/burstable-queue'; - -export class PublishingService { - private queue; - private _publishJobs: ContentItem[] = []; - - constructor(options: BurstableQueueOptions) { - this.queue = new BurstableQueue(options); - } - - async publish(contentItem: ContentItem, action: () => void) { - this.queue.add(async () => { - const publishJob = await contentItem.related.publish(); - this._publishJobs.push(publishJob); - action(); - }); - } - - get publishJobs() { - return this._publishJobs; - } - - async onIdle() { - return this.queue.onIdle(); - } -} From a4728ca8d3807efc2d3d3767e210a567d1a32237 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 5 Sep 2025 15:07:45 +0100 Subject: [PATCH 08/43] feat: pass headers via custom dc http client to match sdk --- .../http-client/dc-http-client.ts | 46 +++++++++---------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/src/common/dc-management-sdk-js/http-client/dc-http-client.ts b/src/common/dc-management-sdk-js/http-client/dc-http-client.ts index a3a58962..a53fadd4 100644 --- a/src/common/dc-management-sdk-js/http-client/dc-http-client.ts +++ b/src/common/dc-management-sdk-js/http-client/dc-http-client.ts @@ -34,9 +34,6 @@ const DEFAULT_RETRY_CONFIG: IAxiosRetryConfig = { } }; -/** - * @hidden - */ export class DCHttpClient implements HttpClient { public client: AxiosInstance; @@ -46,31 +43,34 @@ export class DCHttpClient implements HttpClient { } public async request(config: HttpRequest): Promise { - try { - const response = await this.client.request({ + return this.client + .request({ data: config.data, headers: config.headers, method: config.method, url: config.url - }); - return { - data: response.data, - status: response.status - }; - } catch (error) { - if (error?.response) { - return { - data: error.response.data, - status: error.response.status - }; - } - if (error?.code) { + }) + .then(response => { 
return { - data: { message: error.message }, - status: error.code + headers: response.headers, + data: response.data, + status: response.status }; - } - return error; - } + }) + .catch(error => { + if (error?.response) { + return { + data: error.response.data, + status: error.response.status + }; + } + if (error?.code) { + return { + data: { message: error.message }, + status: error.code + }; + } + return error; + }); } } From 8ca20d1f25e825d718d25ab47927111f29045a97 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 5 Sep 2025 15:58:41 +0100 Subject: [PATCH 09/43] test: updating import tests to use new publishing service --- src/commands/content-item/import.spec.ts | 71 ++-- src/common/import/__mocks__/publish-queue.ts | 34 -- src/common/import/publish-queue.spec.ts | 406 ------------------- src/common/import/publish-queue.ts | 210 ---------- 4 files changed, 41 insertions(+), 680 deletions(-) delete mode 100644 src/common/import/__mocks__/publish-queue.ts delete mode 100644 src/common/import/publish-queue.spec.ts delete mode 100644 src/common/import/publish-queue.ts diff --git a/src/commands/content-item/import.spec.ts b/src/commands/content-item/import.spec.ts index 1e142fef..c4e6127a 100644 --- a/src/commands/content-item/import.spec.ts +++ b/src/commands/content-item/import.spec.ts @@ -1,10 +1,9 @@ import { builder, command, handler, LOG_FILENAME, getDefaultMappingPath } from './import'; import { dependsOn, dependantType } from './__mocks__/dependant-content-helper'; import * as reverter from './import-revert'; -import * as publishingService from '../../common/publishing/content-item-publishing-service'; import { createLog, getDefaultLogPath } from '../../common/log-helpers'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; -import { Folder, ContentType } from 'dc-management-sdk-js'; +import { Folder, ContentType, PublishingJob } from 'dc-management-sdk-js'; import Yargs from 'yargs/yargs'; import { writeFile } from 'fs'; import { join, dirname, basename } from 'path'; @@ -16,6 +15,14 @@ import { ensureDirectoryExists } from '../../common/import/directory-utils'; import { MockContent, ItemTemplate } from '../../common/dc-management-sdk-js/mock-content'; import { FileLog } from '../../common/file-log'; import { MediaRewriter } from '../../common/media/media-rewriter'; +import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; + +const mockPublish = jest.fn().mockImplementation((contentItems, fn) => { + fn(contentItems); +}); +const mockCheck = jest.fn().mockImplementation((publishingJob, fn) => { + fn(new PublishingJob({ state: PublishingJobStatus.COMPLETED })); +}); jest.mock('readline'); jest.mock('./import-revert'); @@ -25,6 +32,26 @@ jest.mock('../../common/log-helpers', () => ({ ...jest.requireActual('../../common/log-helpers'), getDefaultLogPath: jest.fn() })); +jest.mock('../../common/publishing/content-item-publishing-service', () => { + return { + ContentItemPublishingService: jest.fn().mockImplementation(() => { + return { + publish: mockPublish, + onIdle: jest.fn() + }; + }) + }; +}); +jest.mock('../../common/publishing/content-item-publishing-job-service', () => { + return { + ContentItemPublishingJobService: jest.fn().mockImplementation(() => { + return { + check: mockCheck, + onIdle: jest.fn() + }; + }) + }; +}); function rimraf(dir: string): Promise { return new Promise((resolve): void => { @@ -160,28 +187,10 @@ describe('content-item import command', () => { revertLog: 
Promise.resolve(undefined) }; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let publishCalls: any[]; - beforeEach(async () => { jest.clearAllMocks(); jest.mock('readline'); jest.mock('../../services/dynamic-content-client-factory'); - - publishCalls = []; - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (publishingService as any).PublishingService = jest.fn().mockImplementation(() => ({ - publish: jest.fn(async (item, action) => { - publishCalls.push(item); - action(); - }), - publishJobs: [], - onIdle: jest.fn().mockResolvedValue(undefined) - })); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (publishingService as any).publishCalls = publishCalls; }); beforeAll(async () => { @@ -1182,17 +1191,20 @@ describe('content-item import command', () => { dir: `temp_${process.env.JEST_WORKER_ID}/import/publish/`, mapFile: `temp_${process.env.JEST_WORKER_ID}/import/publish.json`, baseRepo: 'targetRepo', - publish: true, - batchPublish: true + publish: true }; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (readline as any).setResponses(['Y']); + await handler(argv); const matches = await mockContent.filterMatch(templates, '', false); expect(matches.length).toEqual(templates.length); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - expect(publishCalls.length).toEqual(2); + expect(mockPublish).toHaveBeenCalledTimes(2); + expect(mockCheck).toHaveBeenCalledTimes(2); await rimraf(`temp_${process.env.JEST_WORKER_ID}/import/publish/`); }); @@ -1238,19 +1250,18 @@ describe('content-item import command', () => { dir: `temp_${process.env.JEST_WORKER_ID}/import/circular/`, mapFile: `temp_${process.env.JEST_WORKER_ID}/import/circular.json`, baseRepo: 'targetRepo', - publish: true, - batchPublish: false + publish: true }; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (readline as any).setResponses(['Y']); + await handler(argv); // check items were created appropriately - expect(mockContent.metrics.itemsCreated).toEqual(4); expect(mockContent.metrics.itemsUpdated).toEqual(3); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - expect(publishCalls.length).toEqual(1); // One of the circular dependancies will be published. 
- const matches = await mockContent.filterMatch(templates, '', false); expect(matches.length).toEqual(templates.length); diff --git a/src/common/import/__mocks__/publish-queue.ts b/src/common/import/__mocks__/publish-queue.ts deleted file mode 100644 index 608c0a21..00000000 --- a/src/common/import/__mocks__/publish-queue.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { ContentItem } from 'dc-management-sdk-js'; -import { JobRequest } from '../publish-queue'; - -export const publishCalls: ContentItem[] = []; - -export class PublishQueue { - maxWaiting = 3; - attemptDelay = 1000; - failedJobs: JobRequest[] = []; - unresolvedJobs: JobRequest[] = []; - waitInProgress = false; - - constructor() { - /* empty */ - } - - async publish(item: ContentItem): Promise { - // TODO: testing ability to throw - - publishCalls.push(item); - - return; - } - - async waitForAll(): Promise { - // TODO: testing ability to throw (in wait for publish) - - return; - } - - isEmpty() { - return true; - } -} diff --git a/src/common/import/publish-queue.spec.ts b/src/common/import/publish-queue.spec.ts deleted file mode 100644 index da1010e5..00000000 --- a/src/common/import/publish-queue.spec.ts +++ /dev/null @@ -1,406 +0,0 @@ -import fetch from 'node-fetch'; -import { Oauth2AuthHeaderProvider, ContentItem } from 'dc-management-sdk-js'; -import { PublishingJob, PublishQueue } from './publish-queue'; -import * as publishQueueModule from './publish-queue'; - -jest.mock('node-fetch'); -jest.mock('dc-management-sdk-js/build/main/lib/oauth2/services/Oauth2AuthHeaderProvider'); - -interface PublishTemplate { - href: string; - status: number; - statusText: string; - headers?: Map; - - jsonProvider: (template: PublishTemplate) => PublishingJob; -} - -const defaultTemplate: PublishTemplate = { - href: '', - status: 404, - statusText: 'NOT_FOUND', - - jsonProvider: () => { - throw new Error('Not valid JSON'); - } -}; - -describe('publish-queue', () => { - describe('publishing tests', () => { - let totalPolls = 0; - let totalRequests = 0; - let authRequests = 0; - - beforeEach((): void => { - totalRequests = 0; - totalPolls = 0; - authRequests = 0; - - jest.spyOn(publishQueueModule, 'delay').mockResolvedValue(); - }); - - afterEach((): void => { - jest.resetAllMocks(); - }); - - // should wait for all publishes to complete when calling waitForAll - - function sharedMock(templates: PublishTemplate[]): void { - (Oauth2AuthHeaderProvider.prototype.getAuthHeader as jest.Mock).mockImplementation(() => { - authRequests++; - const result = 'bearer token-example'; - return Promise.resolve(result); - }); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (fetch as any as jest.Mock).mockImplementation((href, options) => { - const template: PublishTemplate = templates.find(template => template.href == href) || defaultTemplate; - if (options.headers['Authorization'] != 'bearer token-example') { - throw new Error('Not authorized!'); - } - - totalRequests++; - - return Promise.resolve({ - status: template.status, - statusText: template.statusText, - headers: template.headers, - json: jest.fn().mockImplementation(() => Promise.resolve(template.jsonProvider(template))), - text: jest.fn().mockResolvedValue('Error Text') - }); - }); - } - - function getPublishableItem(id: string): ContentItem { - return new ContentItem({ - id: id, - _links: { - publish: { - href: '//publish-' + id - } - } - }); - } - - function publishStartTemplate(href: string, location: string): PublishTemplate { - return { - href: href, - status: 204, - 
statusText: 'No Content', - headers: new Map([['Location', location]]), - - jsonProvider: (): PublishingJob => { - throw new Error('No body'); - } - }; - } - - function progressStepsTemplate(href: string, polls: number, fail?: boolean | number): PublishTemplate { - let callNumber = 0; - - return { - href: href, - status: 200, - statusText: 'OK', - - jsonProvider: (): PublishingJob => { - const result: PublishingJob = { - id: href, - scheduledDate: '', - createdDate: '', - createdBy: '', - state: 'PREPARING', - _links: { self: { href } } - }; - - totalPolls++; - - if (typeof fail === 'number' && fail == callNumber) { - callNumber++; - throw new Error('Data does not parse.'); - } else { - if (callNumber == 0 && polls > 1) { - result.state = 'PREPARING'; - } else if (callNumber < polls - 1) { - result.state = 'PUBLISHING'; - } else { - result.state = fail === true ? 'FAILED' : 'COMPLETED'; - } - } - - callNumber++; - - return result; - } - }; - } - - function multiMock(count: number, polls: number): ContentItem[] { - const items: ContentItem[] = []; - const templates: PublishTemplate[] = []; - - for (let i = 0; i < count; i++) { - templates.push(publishStartTemplate(`//publish-id${i}`, `//publishJob-id${i}`)); - templates.push(progressStepsTemplate(`//publishJob-id${i}`, polls)); - - items.push(getPublishableItem(`id${i}`)); - } - - sharedMock(templates); - - return items; - } - - function makeQueue(maxWaiting: number): PublishQueue { - const queue = new PublishQueue({ clientId: 'id', clientSecret: 'secret', hubId: 'hub' }); - queue.attemptDelay = 0; - queue.maxWaiting = maxWaiting; - - return queue; - } - - it('should request a publish using the REST api, with authentication given by the creation arguments', async () => { - const item1 = getPublishableItem('id1'); - sharedMock([ - publishStartTemplate('//publish-id1', '//publishJob-id1'), - progressStepsTemplate('//publishJob-id1', 3) - ]); - - const queue = makeQueue(10); - - await queue.publish(item1); - - await queue.waitForAll(); - - expect(authRequests).toBeGreaterThan(0); - expect(totalRequests).toEqual(4); - expect(totalPolls).toEqual(3); - }); - - it('should wait for publish completion when hitting the concurrent limit and attempting to publish more', async () => { - const items = multiMock(10, 1); // 10 items, return success on the first poll (instant publish) - - const queue = makeQueue(5); // After 5 concurrent requests, start waiting. - - for (let i = 0; i < items.length; i++) { - await queue.publish(items[i]); - - // Starts polling when i == 5, and each time we continue one job has completed. - expect(totalPolls).toEqual(Math.max(0, i - 4)); - } - - await queue.waitForAll(); - - expect(totalPolls).toEqual(10); - }); - - it('should never wait for publish completion between publishes when less than the concurrent limit', async () => { - const items = multiMock(10, 1); // 10 items, return success on the first poll (instant publish) - - const queue = makeQueue(15); // After 15 concurrent requests, start waiting. - - for (let i = 0; i < items.length; i++) { - await queue.publish(items[i]); - } - - expect(totalPolls).toEqual(0); - - await queue.waitForAll(); - - expect(totalPolls).toEqual(10); - }); - - it('should complete immediately when calling waitForAll with no publishes in progress', async () => { - const queue = makeQueue(15); // After 15 concurrent requests, start waiting. 
- - await queue.waitForAll(); - - expect(totalPolls).toEqual(0); - }); - - it('should throw an error when publish link is not present', async () => { - const item1 = getPublishableItem('id1'); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (item1 as any)._links = {}; - sharedMock([ - publishStartTemplate('//publish-id1', '//publishJob-id1'), - progressStepsTemplate('//publishJob-id1', 3) - ]); - - const queue = makeQueue(15); - - let threw = false; - try { - await queue.publish(item1); - } catch (e) { - threw = true; - } - - expect(threw).toBeTruthy(); - - await queue.waitForAll(); - - expect(totalPolls).toEqual(0); - }); - - it('should throw an error when publish POST response headers do not include a Location for the job status', async () => { - const item1 = getPublishableItem('id1'); - sharedMock([ - { - href: '//publish-id1', - status: 204, - statusText: 'No Content', - headers: new Map(), - - jsonProvider: (): PublishingJob => { - throw new Error('No body'); - } - }, - progressStepsTemplate('//publishJob-id1', 3) - ]); - - const queue = makeQueue(15); - - let threw = false; - try { - await queue.publish(item1); - } catch (e) { - threw = true; - } - - expect(threw).toBeTruthy(); - - await queue.waitForAll(); - - expect(totalPolls).toEqual(0); - }); - - it('should throw an error when publish fails to start (request is not OK)', async () => { - const item1 = getPublishableItem('id1'); - sharedMock([ - { - href: '//publish-id1', - status: 500, - statusText: 'Internal Server Error', - - jsonProvider: (): PublishingJob => { - throw new Error('No body'); - } - }, - progressStepsTemplate('//publishJob-id1', 3) - ]); - - const queue = makeQueue(15); - - let threw = false; - try { - await queue.publish(item1); - } catch (e) { - threw = true; - } - - expect(threw).toBeTruthy(); - - await queue.waitForAll(); - - expect(totalPolls).toEqual(0); - }); - - it('should ignore an attempt waiting for job status if fetching it does not succeed, and request again later as usual', async () => { - const item1 = getPublishableItem('id1'); - - sharedMock([ - publishStartTemplate('//publish-id1', '//publishJob-id1'), - progressStepsTemplate('//publishJob-id1', 3, 1) - ]); - - const queue = makeQueue(15); - - await queue.publish(item1); - - await queue.waitForAll(); - - expect(queue.failedJobs.length).toEqual(0); - expect(totalPolls).toEqual(3); - expect(totalRequests).toEqual(4); - }); - - it('should report failed publishes in the failedJobs list', async () => { - const item1 = getPublishableItem('id1'); - const item2 = getPublishableItem('id2'); // fails - const item3 = getPublishableItem('id3'); // fails - - sharedMock([ - publishStartTemplate('//publish-id1', '//publishJob-id1'), - progressStepsTemplate('//publishJob-id1', 1), - publishStartTemplate('//publish-id2', '//publishJob-id2'), - progressStepsTemplate('//publishJob-id2', 1, true), - publishStartTemplate('//publish-id3', '//publishJob-id3'), - progressStepsTemplate('//publishJob-id3', 1, true) - ]); - - const queue = makeQueue(15); - - await queue.publish(item1); - await queue.publish(item2); - await queue.publish(item3); - - await queue.waitForAll(); - - expect(queue.failedJobs.length).toEqual(2); - expect(queue.failedJobs[0].item).toEqual(item2); - expect(queue.failedJobs[1].item).toEqual(item3); - expect(totalPolls).toEqual(3); - expect(totalRequests).toEqual(6); - }); - - it('should still correctly waitForAll if a previous publish is waiting to start', async () => { - const items = multiMock(10, 1); // 10 items, return 
success on the first poll (instant publish) - - const queue = makeQueue(5); // After 5 concurrent requests, start waiting. - - for (let i = 0; i < items.length; i++) { - // Deliberately avoid waiting after starting the first publish that would have to wait. - // This is an unlikely situation, but handling it consistently is useful. - - if (i < 5) { - await queue.publish(items[i]); - } else { - queue.publish(items[i]); - } - } - - await queue.waitForAll(); - - expect(totalPolls).toEqual(10); - - // Since we process requests instantly, the rate limit delay will be hit for each publish. - expect(publishQueueModule.delay).toHaveBeenCalledTimes(5); - }); - - it('should error publishes when waiting for a publish job exceeds the maxAttempts number', async () => { - const items = multiMock(10, 5); // 10 items, return success on the 5th poll (after our limit) - - const queue = makeQueue(5); // After 5 concurrent requests, start waiting. - queue.maxAttempts = 2; - - for (let i = 0; i < items.length; i++) { - await queue.publish(items[i]); - - if (queue.unresolvedJobs.length > 0) { - // The first job should have failed. - expect(i).toEqual(5); // We only waited for the first job after 0-4 were in the queue. - expect(queue.unresolvedJobs[0].item).toBe(items[0]); - break; - } - - expect(i).toBeLessThan(5); - } - - await queue.waitForAll(); - - expect(totalPolls).toEqual(14); // 6 total publish requests. 2 waits before each before giving up. - expect(queue.unresolvedJobs.length).toEqual(6); - }); - }); -}); diff --git a/src/common/import/publish-queue.ts b/src/common/import/publish-queue.ts deleted file mode 100644 index 55fceb87..00000000 --- a/src/common/import/publish-queue.ts +++ /dev/null @@ -1,210 +0,0 @@ -import { - ContentItem, - Oauth2AuthHeaderProvider, - AxiosHttpClient, - PatTokenAuthHeaderProvider -} from 'dc-management-sdk-js'; -import fetch, { Response } from 'node-fetch'; -import { HalLink } from 'dc-management-sdk-js/build/main/lib/hal/models/HalLink'; -import { ConfigurationParameters } from '../../commands/configure'; - -export const MAX_PUBLISH_RATE_LIMIT = 80; -export const DEFAULT_PUBLISH_RATE_LIMIT = 35; - -export interface PublishingJob { - id: string; - scheduledDate: string; - createdDate: string; - createdBy: string; - state: 'PREPARING' | 'PUBLISHING' | 'COMPLETED' | 'FAILED'; - - _links?: { [name: string]: HalLink }; -} - -export const delay = (duration: number): Promise => { - return new Promise((resolve): void => { - setTimeout(resolve, duration); - }); -}; - -export interface JobRequest { - item: ContentItem; - href: string; -} - -export interface PublishConfig { - publishRateLimit?: number; -} - -export class PublishQueue { - maxWaiting = 35; - maxAttempts = 30; - attemptDelay = 1000; - attemptRateLimit = 60000 / 35; // 35 publishes a minute. 
- failedJobs: JobRequest[] = []; - unresolvedJobs: JobRequest[] = []; - - private inProgressJobs: JobRequest[] = []; - private waitingList: { promise: Promise; resolver: () => void }[] = []; - private auth: Oauth2AuthHeaderProvider | PatTokenAuthHeaderProvider; - private awaitingAll: boolean; - private delayUntil: number[] = []; - private delayId = 0; - waitInProgress = false; - - constructor(config: ConfigurationParameters & PublishConfig) { - const http = new AxiosHttpClient({}); - if (config.clientId && config.clientSecret) { - this.auth = new Oauth2AuthHeaderProvider( - { client_id: config.clientId, client_secret: config.clientSecret }, - { authUrl: process.env.AUTH_URL || 'https://auth.amplience.net' }, - http - ); - } else if (config.patToken) { - this.auth = new PatTokenAuthHeaderProvider(config.patToken); - } - - const publishRateLimit = Math.min(config?.publishRateLimit || DEFAULT_PUBLISH_RATE_LIMIT, MAX_PUBLISH_RATE_LIMIT); - - this.attemptRateLimit = 60000 / publishRateLimit; - } - - private async fetch(href: string, method: string): Promise { - return await fetch(href, { method: method, headers: { Authorization: await this.auth.getAuthHeader() } }); - } - - async publish(item: ContentItem): Promise { - await this.rateLimit(); - - // Do publish - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const publishLink = (item._links as any)['publish']; - - if (publishLink == null) { - throw new Error('Cannot publish the item - link not available.'); - } - - // Need to manually fetch the publish endpoint. - - const publish = await this.fetch(publishLink.href, 'POST'); - if (publish.status != 204) { - throw new Error(`Failed to start publish: ${publish.statusText} - ${await publish.text()}`); - } - - const publishJobInfoHref = publish.headers.get('Location'); - - if (publishJobInfoHref == null) { - throw new Error('Expected publish job location in header. Has the publish workflow changed?'); - } - - this.inProgressJobs.push({ href: publishJobInfoHref, item }); - } - - private async waitForOldestPublish(): Promise { - if (this.inProgressJobs.length === 0) { - return; - } - - this.waitInProgress = true; - - const oldestJob = this.inProgressJobs[0]; - this.inProgressJobs.splice(0, 1); - - // Request the status for the oldest ID. - // If it's still not published/errored, then wait a bit and try again. - - let attempts = 0; - for (; attempts < this.maxAttempts; attempts++) { - let job: PublishingJob; - try { - job = await (await this.fetch(oldestJob.href, 'GET')).json(); - } catch (e) { - // Could not fetch job information. - continue; - } - - if (job.state === 'COMPLETED') { - break; - } else if (job.state === 'FAILED') { - this.failedJobs.push(oldestJob); - break; - } else { - await delay(this.attemptDelay); - } - } - - if (attempts == this.maxAttempts) { - this.unresolvedJobs.push(oldestJob); - } - - // The wait completed. Notify the first in the queue. - - const oldestWaiter = this.waitingList[0]; - if (oldestWaiter != null) { - this.waitingList.splice(0, 1); - - oldestWaiter.resolver(); // Resolve the promise. - } - - if (this.waitingList.length > 0 || this.awaitingAll) { - // Still more waiting. - await this.waitForOldestPublish(); - } else { - this.waitInProgress = false; - } - } - - private async rateLimit(): Promise { - // Rate limit by time. 
- const now = Date.now(); - - if (now < this.delayUntil[this.delayId] || 0) { - await delay(this.delayUntil[this.delayId] - now); - } - - this.delayUntil[this.delayId] = now + this.attemptRateLimit * this.maxWaiting; - this.delayId = (this.delayId + 1) % this.maxWaiting; - - // Rate limit by concurrent volume. - - if (this.inProgressJobs.length != this.maxWaiting) { - return; - } - - // We need to wait. - let resolver: () => void = () => { - /* */ - }; - const myPromise = new Promise((resolve): void => { - resolver = resolve; - }); - - this.waitingList.push({ promise: myPromise, resolver: resolver }); - - if (!this.waitInProgress) { - // Start a wait. - this.waitForOldestPublish(); - } - - await myPromise; - } - - async waitForAll(): Promise { - if (this.waitInProgress) { - // Wait for the last item on the list to complete. - await this.waitingList[this.waitingList.length - 1]?.promise; - } - - this.inProgressJobs = [...this.inProgressJobs, ...this.unresolvedJobs]; - - this.unresolvedJobs = []; - - // Continue regardless of waiters. - this.awaitingAll = true; - await this.waitForOldestPublish(); - } - - isEmpty() { - return this.inProgressJobs.length === 0 && this.unresolvedJobs.length === 0; - } -} From 29921b358881e6a3d613568f3182e803fca75e85 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 5 Sep 2025 16:07:36 +0100 Subject: [PATCH 10/43] feat: removing redundant publishing flags --- docs/CONTENT-ITEM.md | 27 ++++++------------- docs/HUB.md | 1 - src/commands/content-item/copy.spec.ts | 9 ------- src/commands/content-item/copy.ts | 7 ----- src/commands/content-item/move.spec.ts | 8 ------ src/commands/content-item/move.ts | 6 ----- src/commands/hub/clone.ts | 5 ---- src/common/publish/publish-options.ts | 1 - src/interfaces/clone-hub-builder-options.ts | 1 - .../copy-item-builder-options.interface.ts | 1 - .../import-item-builder-options.interface.ts | 1 - 11 files changed, 8 insertions(+), 59 deletions(-) diff --git a/docs/CONTENT-ITEM.md b/docs/CONTENT-ITEM.md index a7faf1d1..1baf6811 100644 --- a/docs/CONTENT-ITEM.md +++ b/docs/CONTENT-ITEM.md @@ -165,7 +165,6 @@ dc-cli content-item import | -v
--validate | [boolean] | Only recreate folder structure - content is validated but not imported. | | --skipIncomplete | [boolean] | Skip any content items that has one or more missing dependancy. | | --publish | [boolean] | Publish any content items that have an existing publish status in their JSON. | -| --batchPublish | [boolean] | Batch publish requests up to the rate limit. (35/min) | | --republish | [boolean] | Republish content items regardless of whether the import changed them or not.
(--publish not required) | | --excludeKeys | [boolean] | Exclude delivery keys when importing content items. | | --media | [boolean] | Detect and rewrite media links to match assets in the target account's Content Hub. Your client must have Content Hub permissions configured. | @@ -215,7 +214,6 @@ dc-cli content-item copy | --skipIncomplete | [boolean] | Skip any content item that has one or more missing dependancy. | | --lastPublish | [boolean] | When available, export the last published version of a content item rather than its newest version. | | --publish | [boolean] | Publish any content items that have an existing publish status in their JSON. | -| --batchPublish | [boolean] | Batch publish requests up to the rate limit. (35/min) | | --republish | [boolean] | Republish content items regardless of whether the import changed them or not.
(--publish not required) | | --excludeKeys | [boolean] | Exclude delivery keys when importing content items. | | --media | [boolean] | Detect and rewrite media links to match assets in the target account's DAM.
Your client must have DAM permissions configured. | @@ -265,7 +263,6 @@ dc-cli content-item move | --skipIncomplete | [boolean] | Skip any content item that has one or more missing dependancy. | | --lastPublish | [boolean] | When available, export the last published version of a content item rather than its newest version. | | --publish | [boolean] | Publish any content items that have an existing publish status in their JSON. | -| --batchPublish | [boolean] | Batch publish requests up to the rate limit. (35/min) | | --republish | [boolean] | Republish content items regardless of whether the import changed them or not.
(--publish not required) | | --excludeKeys | [boolean] | Exclude delivery keys when importing content items. | | --media | [boolean] | Detect and rewrite media links to match assets in the target account's DAM.
Your client must have DAM permissions configured. | @@ -402,16 +399,14 @@ If no `id` is provided, all content items in all content repositories in the spe #### Options -| Option | Alias | Description | -| -------------------- | ----- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -| `--repoId` | | The ID of a content repository to restrict publishing scope. _(Optional)_ | -| `--folderId` | | The ID of a folder to restrict publishing scope. _(Optional)_ | -| `--facet` | | Filter content using facets. Format:
`label:example name,locale:en-GB`
Regex supported with `/pattern/`.
See README for more examples. | -| `--batchPublish` | | Enable batch publishing up to 35 items per minute. _(Optional)_ | -| `-f`, `--force` | | Skip confirmation prompts before publishing. | -| `-s`, `--silent` | | Disable log file creation. | -| `--logFile` | | Path to write the log file.
Default: `(log_filename)` | -| `--publishRateLimit` | | Set the number of publishes per minute (max = 80).
Default: `35` | +| Option | Alias | Description | +| ---------------- | ----- | -------------------------------------------------------------------------------------------------------------------------------------------------- | +| `--repoId` | | The ID of a content repository to restrict publishing scope. _(Optional)_ | +| `--folderId` | | The ID of a folder to restrict publishing scope. _(Optional)_ | +| `--facet` | | Filter content using facets. Format:
`label:example name,locale:en-GB`
Regex supported with `/pattern/`.
See README for more examples. | +| `-f`, `--force` | | Skip confirmation prompts before publishing. | +| `-s`, `--silent` | | Disable log file creation. | +| `--logFile` | | Path to write the log file.
Default: `(log_filename)` | --- @@ -434,9 +429,3 @@ dc-cli content-item publish --repoId your-repo-id ```bash dc-cli content-item publish --facet "locale:en-GB,label:homepage" ``` - -##### Batch publish all items silently - -```bash -dc-cli content-item publish --batchPublish --silent --force -``` diff --git a/docs/HUB.md b/docs/HUB.md index b9f33279..5f451a33 100644 --- a/docs/HUB.md +++ b/docs/HUB.md @@ -107,7 +107,6 @@ dc-cli hub clone | --skipIncomplete | [boolean] | Skip any content item that has one or more missing dependancy. | | --lastPublish | [boolean] | When available, export the last published version of a content item rather than its newest version. | | --publish | [boolean] | Publish any content items that have an existing publish status in their JSON. | -| --batchPublish | [boolean] | Batch publish requests up to the rate limit. (35/min) | | --republish | [boolean] | Republish content items regardless of whether the import changed them or not.
(--publish not required) | | --excludeKeys | [boolean] | Exclude delivery keys when importing content items. | | --media | [boolean] | Detect and rewrite media links to match assets in the target account's DAM.
Your client must have DAM permissions configured. | diff --git a/src/commands/content-item/copy.spec.ts b/src/commands/content-item/copy.spec.ts index bffd90e7..2ce3dc25 100644 --- a/src/commands/content-item/copy.spec.ts +++ b/src/commands/content-item/copy.spec.ts @@ -138,12 +138,6 @@ describe('content-item copy command', () => { 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }); - expect(spyOption).toHaveBeenCalledWith('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. (35/min)' - }); - expect(spyOption).toHaveBeenCalledWith('republish', { type: 'boolean', boolean: true, @@ -253,7 +247,6 @@ describe('content-item copy command', () => { skipIncomplete: false, lastPublish: true, publish: true, - batchPublish: true, republish: true, excludeKeys: true, media: true @@ -278,7 +271,6 @@ describe('content-item copy command', () => { expect(importCalls[0].validate).toEqual(argv.validate); expect(importCalls[0].skipIncomplete).toEqual(argv.skipIncomplete); expect(importCalls[0].publish).toEqual(argv.publish); - expect(importCalls[0].batchPublish).toEqual(argv.batchPublish); expect(importCalls[0].republish).toEqual(argv.republish); expect(importCalls[0].excludeKeys).toEqual(argv.excludeKeys); expect(importCalls[0].media).toEqual(argv.media); @@ -307,7 +299,6 @@ describe('content-item copy command', () => { skipIncomplete: false, lastPublish: true, publish: true, - batchPublish: true, republish: true, excludeKeys: true, media: true, diff --git a/src/commands/content-item/copy.ts b/src/commands/content-item/copy.ts index 0f59789b..c62b1e04 100644 --- a/src/commands/content-item/copy.ts +++ b/src/commands/content-item/copy.ts @@ -118,12 +118,6 @@ export const builder = (yargs: Argv): void => { 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }) - .option('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. (35/min)' - }) - .option('republish', { type: 'boolean', boolean: true, @@ -255,7 +249,6 @@ export const handler = async (argv: Arguments { 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }); - expect(spyOption).toHaveBeenCalledWith('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. (35/min)' - }); - expect(spyOption).toHaveBeenCalledWith('republish', { type: 'boolean', boolean: true, @@ -269,7 +263,6 @@ describe('content-item move command', () => { publish: true, lastPublish: true, - batchPublish: true, mapFile: 'map.json', force: false, @@ -298,7 +291,6 @@ describe('content-item move command', () => { expect(copyCalls[0].publish).toEqual(argv.publish); expect(copyCalls[0].lastPublish).toEqual(argv.lastPublish); - expect(copyCalls[0].batchPublish).toEqual(argv.batchPublish); expect(argv.exportedIds).toEqual(exportIds); diff --git a/src/commands/content-item/move.ts b/src/commands/content-item/move.ts index a1b2fe6c..b1a99167 100644 --- a/src/commands/content-item/move.ts +++ b/src/commands/content-item/move.ts @@ -111,12 +111,6 @@ export const builder = (yargs: Argv): void => { 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }) - .option('batchPublish', { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. 
(35/min)' - }) - .option('republish', { type: 'boolean', boolean: true, diff --git a/src/commands/hub/clone.ts b/src/commands/hub/clone.ts index 6b04af73..245d638c 100644 --- a/src/commands/hub/clone.ts +++ b/src/commands/hub/clone.ts @@ -116,11 +116,6 @@ export const builder = (yargs: Argv): void => { describe: 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }, - batchPublish: { - type: 'boolean', - boolean: true, - describe: 'Batch publish requests up to the rate limit. (35/min)' - }, republish: { type: 'boolean', boolean: true, diff --git a/src/common/publish/publish-options.ts b/src/common/publish/publish-options.ts index 83371b65..e8f6ffcc 100644 --- a/src/common/publish/publish-options.ts +++ b/src/common/publish/publish-options.ts @@ -8,5 +8,4 @@ export default interface PublishOptions { logFile: FileLog; force?: boolean; silent?: boolean; - publishRateLimit?: number; } diff --git a/src/interfaces/clone-hub-builder-options.ts b/src/interfaces/clone-hub-builder-options.ts index f7cb7800..1e7b932e 100644 --- a/src/interfaces/clone-hub-builder-options.ts +++ b/src/interfaces/clone-hub-builder-options.ts @@ -16,7 +16,6 @@ export interface CloneHubBuilderOptions { logFile: FileLog; lastPublish?: boolean; publish?: boolean; - batchPublish?: boolean; republish?: boolean; excludeKeys?: boolean; acceptSnapshotLimits: boolean; diff --git a/src/interfaces/copy-item-builder-options.interface.ts b/src/interfaces/copy-item-builder-options.interface.ts index 0a0387d7..fac6e3a5 100644 --- a/src/interfaces/copy-item-builder-options.interface.ts +++ b/src/interfaces/copy-item-builder-options.interface.ts @@ -18,7 +18,6 @@ export interface CopyItemBuilderOptions { revertLog: Promise; lastPublish?: boolean; publish?: boolean; - batchPublish?: boolean; republish?: boolean; excludeKeys?: boolean; exportedIds?: string[]; diff --git a/src/interfaces/import-item-builder-options.interface.ts b/src/interfaces/import-item-builder-options.interface.ts index dd0bf039..f9cbdafd 100644 --- a/src/interfaces/import-item-builder-options.interface.ts +++ b/src/interfaces/import-item-builder-options.interface.ts @@ -6,7 +6,6 @@ export interface ImportItemBuilderOptions { baseFolder?: string; mapFile?: string; publish?: boolean; - batchPublish?: boolean; republish?: boolean; force?: boolean; validate?: boolean; From 49a1fa287b3c4bfc983ec89bc9c50322c42c21db Mon Sep 17 00:00:00 2001 From: DB Date: Fri, 5 Sep 2025 17:25:05 +0100 Subject: [PATCH 11/43] test: updating publish tests to use new publishing service --- src/commands/content-item/publish.spec.ts | 74 +++++++++++------------ 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index f88e59ca..d3d4102a 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -1,18 +1,45 @@ import { builder, handler, getContentItems, processItems, LOG_FILENAME, coerceLog } from './publish'; -import { Status, ContentItem, DynamicContent, Hub, PublishingJobLocation, PublishingJob } from 'dc-management-sdk-js'; +import { Status, ContentItem, DynamicContent, Hub, PublishingJob } from 'dc-management-sdk-js'; import { FileLog } from '../../common/file-log'; -import * as publishingService from '../../common/publishing/content-item-publishing-service'; import { Arguments } from 'yargs'; import { ConfigurationParameters } from '../configure'; import PublishOptions from 
'../../common/publish/publish-options'; import Yargs from 'yargs/yargs'; import readline from 'readline'; +import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; + +const mockPublish = jest.fn().mockImplementation((contentItems, fn) => { + fn(contentItems); +}); +const mockCheck = jest.fn().mockImplementation((publishingJob, fn) => { + fn(new PublishingJob({ state: PublishingJobStatus.COMPLETED })); +}); jest.mock('../../services/dynamic-content-client-factory'); jest.mock('../../common/content-item/confirm-all-content'); jest.mock('../../common/log-helpers'); jest.mock('../../common/filter/fetch-content'); jest.mock('readline'); +jest.mock('../../common/publishing/content-item-publishing-service', () => { + return { + ContentItemPublishingService: jest.fn().mockImplementation(() => { + return { + publish: mockPublish, + onIdle: jest.fn() + }; + }) + }; +}); +jest.mock('../../common/publishing/content-item-publishing-job-service', () => { + return { + ContentItemPublishingJobService: jest.fn().mockImplementation(() => { + return { + check: mockCheck, + onIdle: jest.fn() + }; + }) + }; +}); const mockClient = { contentItems: { @@ -136,26 +163,9 @@ describe('publish tests', () => { }); describe('processItems tests', () => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let publishCalls: any[]; - beforeEach(() => { jest.clearAllMocks(); - - publishCalls = []; - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (publishingService as any).PublishingService = jest.fn().mockImplementation(() => ({ - publish: jest.fn(async (item, action) => { - publishCalls.push(item); - action(); - }), - publishJobs: [], - onIdle: jest.fn().mockResolvedValue(undefined) - })); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (publishingService as any).publishCalls = publishCalls; + jest.mock('readline'); }); it('should exit early if no content items', async () => { @@ -193,19 +203,6 @@ describe('publish tests', () => { it('should process all items and call publish', async () => { const contentItem = new ContentItem({ id: '1', label: 'Publish Me', body: { _meta: {} } }); - const mockedPublishingJob = jest.fn(); - const mockedPublish = jest.fn(); - - const publishingJobLocation = new PublishingJobLocation({ - location: 'https://api.amplience.net/v2/content/publishing-jobs/68adcb6c1ad05f3b50ebc821' - }); - - publishingJobLocation.related.publishingJob = mockedPublishingJob.mockResolvedValue(new PublishingJob()); - - contentItem.related.publish = mockedPublish.mockResolvedValue(publishingJobLocation); - - mockClient.contentItems.get = jest.fn().mockResolvedValue(contentItem); - // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['Y']); @@ -219,8 +216,8 @@ describe('publish tests', () => { client: mockClient }); - expect(mockedPublish).toHaveBeenCalledTimes(1); - expect(mockedPublishingJob).toHaveBeenCalledTimes(1); + expect(mockPublish).toHaveBeenCalledTimes(1); + expect(mockCheck).toHaveBeenCalledTimes(1); }); it('should process all items while filtering out any dependencies and call publish', async () => { @@ -242,6 +239,9 @@ describe('publish tests', () => { body: { _meta: {} } }); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (readline as any).setResponses(['Y']); + await processItems({ contentItems: [contentItemWithDependency, contentItemDependency], force: true, @@ -252,8 +252,8 @@ describe('publish tests', () => { client: mockClient }); - // 
eslint-disable-next-line @typescript-eslint/no-explicit-any - expect(publishCalls.length).toBe(1); + expect(mockPublish).toHaveBeenCalledTimes(1); + expect(mockCheck).toHaveBeenCalledTimes(1); }); }); From c5bc0bf54ccba23e302d9d68fa93cd57134f43bc Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Mon, 8 Sep 2025 12:03:20 +0100 Subject: [PATCH 12/43] feat: add publish progress and only log to log file --- src/commands/content-item/import.ts | 19 ++++++++++++++----- src/commands/content-item/publish.ts | 22 +++++++++++++++++----- 2 files changed, 31 insertions(+), 10 deletions(-) diff --git a/src/commands/content-item/import.ts b/src/commands/content-item/import.ts index e8183b8e..e66ab559 100644 --- a/src/commands/content-item/import.ts +++ b/src/commands/content-item/import.ts @@ -886,8 +886,9 @@ const importTree = async ( log.appendLine(`Publishing ${publishable.length} items. (${publishChildren} children included)`); const publishingService = new ContentItemPublishingService(); - const contentItemPublishJobs: [ContentItem, PublishingJob][] = []; + const publishProgress = progressBar(publishable.length, 0, { title: 'Publishing content items' }); + for (let i = 0; i < publishable.length; i++) { const item = publishable[i].item; @@ -895,31 +896,39 @@ const importTree = async ( await publishingService.publish(item, (contentItem, publishingJob) => { contentItemPublishJobs.push([contentItem, publishingJob]); - log.appendLine(`Initiated publish for "${item.label}"`); + log.addComment(`Initiated publish for "${item.label}"`); + publishProgress.increment(); }); } catch (e) { - log.appendLine(`Failed to initiate publish for ${item.label}: ${e.toString()}`); + log.appendLine(`\nFailed to initiate publish for ${item.label}: ${e.toString()}`); + publishProgress.increment(); } } await publishingService.onIdle(); + publishProgress.stop(); const checkPublishJobs = await asyncQuestion( - 'All publishes have been requested, would you like to wait for all publishes to complete? (Y/n)' + 'All publishes have been requested, would you like to wait for all publishes to complete? 
(y/n)' ); if (checkPublishJobs) { const publishingJobService = new ContentItemPublishingJobService(client); + const checkPublishProgress = progressBar(contentItemPublishJobs.length, 0, { + title: 'Content items publishes complete' + }); for (const [contentItem, publishingJob] of contentItemPublishJobs) { publishingJobService.check(publishingJob, async resolvedPublishingJob => { if (resolvedPublishingJob.state === PublishingJobStatus.FAILED) { - log.appendLine(`Failed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); + log.appendLine(`\nFailed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); } + checkPublishProgress.increment(); }); } await publishingJobService.onIdle(); + checkPublishProgress.stop(); } } diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index 09fff26b..c6c55f1f 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -15,6 +15,7 @@ import { ContentMapping } from '../../common/content-mapping'; import { ContentItemPublishingService } from '../../common/publishing/content-item-publishing-service'; import { ContentItemPublishingJobService } from '../../common/publishing/content-item-publishing-job-service'; import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; +import { progressBar } from '../../common/progress-bar/progress-bar'; export const command = 'publish [id]'; @@ -184,38 +185,49 @@ export const processItems = async ({ log.appendLine(`Publishing ${rootContentItems.length} items.`); const publishingService = new ContentItemPublishingService(); - const contentItemPublishJobs: [ContentItem, PublishingJob][] = []; + const publishProgress = progressBar(rootContentItems.length, 0, { title: 'Publishing content items' }); + for (const item of rootContentItems) { try { await publishingService.publish(item, (contentItem, publishingJob) => { contentItemPublishJobs.push([contentItem, publishingJob]); - log.appendLine(`Initiated publish for "${item.label}"`); + log.addComment(`Initiated publish for "${item.label}"`); + publishProgress.increment(); }); } catch (e) { - log.appendLine(`Failed to initiate publish for ${item.label}: ${e.toString()}`); + log.appendLine(`\nFailed to initiate publish for ${item.label}: ${e.toString()}`); + publishProgress.increment(); } } await publishingService.onIdle(); + publishProgress.stop(); const checkPublishJobs = await asyncQuestion( - 'All publishes have been requested, would you like to wait for all publishes to complete? (Y/n)' + 'All publishes have been requested, would you like to wait for all publishes to complete? 
(y/n)' ); if (checkPublishJobs) { + log.appendLine(`Checking publishing state for ${contentItemPublishJobs.length} items.`); + const checkPublishProgress = progressBar(contentItemPublishJobs.length, 0, { + title: 'Content items publishes complete' + }); + const publishingJobService = new ContentItemPublishingJobService(client); for (const [contentItem, publishingJob] of contentItemPublishJobs) { publishingJobService.check(publishingJob, async resolvedPublishingJob => { if (resolvedPublishingJob.state === PublishingJobStatus.FAILED) { - log.appendLine(`Failed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); + log.appendLine(`\nFailed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); } + checkPublishProgress.increment(); }); } await publishingJobService.onIdle(); + checkPublishProgress.stop(); } log.appendLine(`Publishing complete`); From 5c768d97a3be8748a9d9b6decc307951ab3fe93f Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Mon, 8 Sep 2025 12:10:02 +0100 Subject: [PATCH 13/43] feat: allow publish checks to occur without user input --- src/commands/content-item/import.ts | 9 +++++---- src/commands/content-item/publish.ts | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/commands/content-item/import.ts b/src/commands/content-item/import.ts index e66ab559..94d8ca30 100644 --- a/src/commands/content-item/import.ts +++ b/src/commands/content-item/import.ts @@ -908,11 +908,12 @@ const importTree = async ( await publishingService.onIdle(); publishProgress.stop(); - const checkPublishJobs = await asyncQuestion( - 'All publishes have been requested, would you like to wait for all publishes to complete? (y/n)' - ); + const checkPublishJobs = async () => + await asyncQuestion( + 'All publishes have been requested, would you like to wait for all publishes to complete? (y/n)' + ); - if (checkPublishJobs) { + if (argv.force || (await checkPublishJobs())) { const publishingJobService = new ContentItemPublishingJobService(client); const checkPublishProgress = progressBar(contentItemPublishJobs.length, 0, { title: 'Content items publishes complete' diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index c6c55f1f..d675a718 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -205,11 +205,12 @@ export const processItems = async ({ await publishingService.onIdle(); publishProgress.stop(); - const checkPublishJobs = await asyncQuestion( - 'All publishes have been requested, would you like to wait for all publishes to complete? (y/n)' - ); + const checkPublishJobs = async () => + await asyncQuestion( + 'All publishes have been requested, would you like to wait for all publishes to complete? 
(y/n)' + ); - if (checkPublishJobs) { + if (force || (await checkPublishJobs())) { log.appendLine(`Checking publishing state for ${contentItemPublishJobs.length} items.`); const checkPublishProgress = progressBar(contentItemPublishJobs.length, 0, { title: 'Content items publishes complete' From e0c13b7b3ca6457be54643d3c8f1d61ecb336f55 Mon Sep 17 00:00:00 2001 From: DB Date: Tue, 9 Sep 2025 08:56:56 +0100 Subject: [PATCH 14/43] test: adding tests for publishing services --- ...ontent-item-publishing-job-service.spec.ts | 64 +++++++++++++++++++ .../content-item-publishing-service.spec.ts | 53 +++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 src/common/publishing/content-item-publishing-job-service.spec.ts create mode 100644 src/common/publishing/content-item-publishing-service.spec.ts diff --git a/src/common/publishing/content-item-publishing-job-service.spec.ts b/src/common/publishing/content-item-publishing-job-service.spec.ts new file mode 100644 index 00000000..95a5f760 --- /dev/null +++ b/src/common/publishing/content-item-publishing-job-service.spec.ts @@ -0,0 +1,64 @@ +import { DynamicContent, PublishingJob } from 'dc-management-sdk-js'; +import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; +import { ContentItemPublishingJobService } from './content-item-publishing-job-service'; + +jest.mock('../burstable-queue/burstable-queue', () => ({ + BurstableQueue: jest.fn().mockImplementation(() => ({ + add: (fn: () => Promise) => fn(), + onIdle: async () => Promise.resolve(), + size: () => 0, + pending: () => 0 + })) +})); + +const mockClient: { + publishingJob: { + get: jest.Mock, [string]>; + }; +} = { + publishingJob: { + get: jest.fn() + } +}; + +describe('ContentItemPublishingJobService', () => { + let service: ContentItemPublishingJobService; + const baseJob = { id: 'job1' } as PublishingJob; + + beforeEach(() => { + jest.clearAllMocks(); + service = new ContentItemPublishingJobService(mockClient as unknown as DynamicContent); + }); + + it('calls callback when job completes', async () => { + (mockClient.publishingJob.get as jest.Mock).mockResolvedValue({ ...baseJob, state: PublishingJobStatus.COMPLETED }); + + const cb = jest.fn(); + await service.check(baseJob, cb); + await service.onIdle(); + + expect(cb).toHaveBeenCalledWith(expect.objectContaining({ state: PublishingJobStatus.COMPLETED })); + }); + + it('calls callback when job fails', async () => { + (mockClient.publishingJob.get as jest.Mock).mockResolvedValue({ ...baseJob, state: PublishingJobStatus.FAILED }); + + const cb = jest.fn(); + await service.check(baseJob, cb); + await service.onIdle(); + + expect(cb).toHaveBeenCalledWith(expect.objectContaining({ state: PublishingJobStatus.FAILED })); + }); + + it('retries until job is done', async () => { + (mockClient.publishingJob.get as jest.Mock) + .mockResolvedValueOnce({ ...baseJob, state: PublishingJobStatus.PREPARING }) + .mockResolvedValueOnce({ ...baseJob, state: PublishingJobStatus.COMPLETED }); + + const cb = jest.fn(); + await service.check(baseJob, cb); + await service.onIdle(); + + expect(cb).toHaveBeenCalledTimes(1); + }); +}); diff --git a/src/common/publishing/content-item-publishing-service.spec.ts b/src/common/publishing/content-item-publishing-service.spec.ts new file mode 100644 index 00000000..8ddc8138 --- /dev/null +++ b/src/common/publishing/content-item-publishing-service.spec.ts @@ -0,0 +1,53 @@ +import { ContentItem, PublishingJob } from 'dc-management-sdk-js'; +import { 
ContentItemPublishingService } from './content-item-publishing-service'; + +jest.mock('../burstable-queue/burstable-queue', () => ({ + BurstableQueue: jest.fn().mockImplementation(() => ({ + add: (fn: () => Promise) => fn(), + onIdle: async () => Promise.resolve(), + size: () => 0, + pending: () => 0 + })) +})); + +const createMockContentItem = (id: string, jobId: string): ContentItem => { + const publishJob: PublishingJob = { id: jobId } as PublishingJob; + const publishLocation = { related: { publishingJob: jest.fn().mockResolvedValue(publishJob) } }; + return { + id, + related: { publish: jest.fn().mockResolvedValue(publishLocation) } + } as unknown as ContentItem; +}; + +describe('ContentItemPublishingService', () => { + let service: ContentItemPublishingService; + let item1: ContentItem; + let item2: ContentItem; + + beforeEach(() => { + service = new ContentItemPublishingService(); + item1 = createMockContentItem('item-1', 'job-1'); + item2 = createMockContentItem('item-2', 'job-2'); + }); + + it('publishes an item and records the job', async () => { + const cb = jest.fn(); + + await service.publish(item1, cb); + await service.onIdle(); + + expect(cb).toHaveBeenCalledWith(item1, expect.objectContaining({ id: 'job-1' })); + expect(service.publishJobs.map(j => j.id)).toEqual(['job-1']); + }); + + it('handles multiple publishes', async () => { + const cb = jest.fn(); + + await service.publish(item1, cb); + await service.publish(item2, cb); + await service.onIdle(); + + expect(cb).toHaveBeenCalledTimes(2); + expect(service.publishJobs.map(j => j.id)).toEqual(['job-1', 'job-2']); + }); +}); From b637988b156f1f766e1fbb7dd986b69fc9abd542 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Mon, 15 Sep 2025 13:50:08 +0100 Subject: [PATCH 15/43] fix: improve logging when publishing content --- .gitignore | 6 +++++- src/commands/content-item/import.ts | 4 +++- src/commands/content-item/publish.spec.ts | 1 + src/commands/content-item/publish.ts | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 4b28f80f..fd2b89b9 100644 --- a/.gitignore +++ b/.gitignore @@ -53,7 +53,8 @@ temp/ .LSOverride # Icon must end with two \r -Icon +Icon + # Thumbnails ._* @@ -73,3 +74,6 @@ Icon Network Trash Folder Temporary Items .apdisk + +# dc-cli outputs +exports \ No newline at end of file diff --git a/src/commands/content-item/import.ts b/src/commands/content-item/import.ts index 94d8ca30..3ed4272d 100644 --- a/src/commands/content-item/import.ts +++ b/src/commands/content-item/import.ts @@ -895,7 +895,6 @@ const importTree = async ( try { await publishingService.publish(item, (contentItem, publishingJob) => { contentItemPublishJobs.push([contentItem, publishingJob]); - log.addComment(`Initiated publish for "${item.label}"`); publishProgress.increment(); }); @@ -905,6 +904,7 @@ const importTree = async ( } } + log.addComment(`Waiting for publishes to be requested`); await publishingService.onIdle(); publishProgress.stop(); @@ -914,6 +914,7 @@ const importTree = async ( ); if (argv.force || (await checkPublishJobs())) { + log.addComment(`Checking publshing jobs`); const publishingJobService = new ContentItemPublishingJobService(client); const checkPublishProgress = progressBar(contentItemPublishJobs.length, 0, { title: 'Content items publishes complete' @@ -921,6 +922,7 @@ const importTree = async ( for (const [contentItem, publishingJob] of contentItemPublishJobs) { publishingJobService.check(publishingJob, async resolvedPublishingJob => { + log.addComment(`Finished 
checking publish job for ${contentItem.label}`); if (resolvedPublishingJob.state === PublishingJobStatus.FAILED) { log.appendLine(`\nFailed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); } diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index d3d4102a..d24ec78f 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -53,6 +53,7 @@ const mockClient = { const mockLog = { open: jest.fn().mockReturnValue({ appendLine: jest.fn(), + addComment: jest.fn(), close: jest.fn() }) } as unknown as FileLog; diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index d675a718..2a7c4e12 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -192,7 +192,6 @@ export const processItems = async ({ try { await publishingService.publish(item, (contentItem, publishingJob) => { contentItemPublishJobs.push([contentItem, publishingJob]); - log.addComment(`Initiated publish for "${item.label}"`); publishProgress.increment(); }); @@ -220,6 +219,7 @@ export const processItems = async ({ for (const [contentItem, publishingJob] of contentItemPublishJobs) { publishingJobService.check(publishingJob, async resolvedPublishingJob => { + log.addComment(`Finished checking publish job for ${contentItem.label}`); if (resolvedPublishingJob.state === PublishingJobStatus.FAILED) { log.appendLine(`\nFailed to publish ${contentItem.label}: ${resolvedPublishingJob.publishErrorStatus}`); } From 2f0caba5057615f6c3e2e50384d59283e4019029 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Mon, 15 Sep 2025 15:20:36 +0100 Subject: [PATCH 16/43] fix: moving publish complete log to correct scope --- src/commands/content-item/import.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands/content-item/import.ts b/src/commands/content-item/import.ts index 3ed4272d..9745ae58 100644 --- a/src/commands/content-item/import.ts +++ b/src/commands/content-item/import.ts @@ -933,9 +933,9 @@ const importTree = async ( await publishingJobService.onIdle(); checkPublishProgress.stop(); } + log.appendLine('Publishing complete'); } - log.appendLine('Publishing complete'); return true; }; From 3286ecd695629070f8fe3315a7601b09fad99da0 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Mon, 15 Sep 2025 17:26:15 +0100 Subject: [PATCH 17/43] feat: add min time to burstable queue to spread load --- src/common/burstable-queue/burstable-queue.spec.ts | 6 ++++++ src/common/burstable-queue/burstable-queue.ts | 3 +++ 2 files changed, 9 insertions(+) diff --git a/src/common/burstable-queue/burstable-queue.spec.ts b/src/common/burstable-queue/burstable-queue.spec.ts index e28470dc..c0052890 100644 --- a/src/common/burstable-queue/burstable-queue.spec.ts +++ b/src/common/burstable-queue/burstable-queue.spec.ts @@ -6,6 +6,7 @@ describe('burstable-queue', () => { const interval = 500; const burstableQueue = new BurstableQueue({ concurrency: 1, + minTime: 0, burstIntervalCap: 4, sustainedIntervalCap: 1, interval @@ -53,6 +54,7 @@ describe('burstable-queue', () => { it('should wait until the the queue is idle (queue is empty and all tasks executed)', async () => { const burstableQueue = new BurstableQueue({ concurrency: 1, + minTime: 0, burstIntervalCap: 4, sustainedIntervalCap: 1, interval: 400 @@ -77,6 +79,7 @@ describe('burstable-queue', () => { it('should return the size of the queue (queued and executing) - all queued', () => { const 
burstableQueue = new BurstableQueue({ concurrency: 1, + minTime: 0, burstIntervalCap: 4, sustainedIntervalCap: 1, interval: 400 @@ -94,6 +97,7 @@ describe('burstable-queue', () => { it('should return the size of the queue (queued and executing) - queue task in flight', async () => { const burstableQueue = new BurstableQueue({ concurrency: 1, + minTime: 0, burstIntervalCap: 4, sustainedIntervalCap: 1, interval: 400 @@ -115,6 +119,7 @@ describe('burstable-queue', () => { it('should return the number of pending queue items (queued, not executing)', () => { const burstableQueue = new BurstableQueue({ concurrency: 1, + minTime: 0, burstIntervalCap: 4, sustainedIntervalCap: 1, interval: 400 @@ -132,6 +137,7 @@ describe('burstable-queue', () => { it('should return the number of pending queue items (queued, not executing) - queue task in flight', async () => { const burstableQueue = new BurstableQueue({ concurrency: 1, + minTime: 0, burstIntervalCap: 4, sustainedIntervalCap: 1, interval: 400 diff --git a/src/common/burstable-queue/burstable-queue.ts b/src/common/burstable-queue/burstable-queue.ts index 76171eba..50205ab0 100644 --- a/src/common/burstable-queue/burstable-queue.ts +++ b/src/common/burstable-queue/burstable-queue.ts @@ -1,12 +1,14 @@ import Bottleneck from 'bottleneck'; export const CONCURRENCY = 4; +export const MIN_TIME = 1000; export const INITIAL_RESERVOIR = 70; export const RESERVOIR_REFRESH_AMOUNT = 30; export const RESERVOIR_INCREASE_INTERVAL = 60_000; export interface BurstableQueueOptions { concurrency?: number; + minTime?: number; burstIntervalCap?: number; sustainedIntervalCap?: number; interval?: number; @@ -18,6 +20,7 @@ export class BurstableQueue { constructor(options: BurstableQueueOptions) { this.queue = new Bottleneck({ maxConcurrent: options.concurrency || CONCURRENCY, + minTime: options.minTime ?? 
MIN_TIME,
       reservoir: options.burstIntervalCap || INITIAL_RESERVOIR, // initial value
       reservoirRefreshAmount: options.sustainedIntervalCap || RESERVOIR_REFRESH_AMOUNT,
       reservoirRefreshInterval: options.interval || RESERVOIR_INCREASE_INTERVAL

From 0b893cb55f29b3f828195d827f6d8cb4ae34e353 Mon Sep 17 00:00:00 2001
From: Ben Pearey
Date: Tue, 16 Sep 2025 15:40:35 +0100
Subject: [PATCH 18/43] fix: lowering min time to make sure we use the full burst interval reservoir

---
 src/common/burstable-queue/burstable-queue.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/common/burstable-queue/burstable-queue.ts b/src/common/burstable-queue/burstable-queue.ts
index 50205ab0..45ab3566 100644
--- a/src/common/burstable-queue/burstable-queue.ts
+++ b/src/common/burstable-queue/burstable-queue.ts
@@ -1,7 +1,7 @@
 import Bottleneck from 'bottleneck';
 
 export const CONCURRENCY = 4;
-export const MIN_TIME = 1000;
+export const MIN_TIME = 800;
 export const INITIAL_RESERVOIR = 70;
 export const RESERVOIR_REFRESH_AMOUNT = 30;
 export const RESERVOIR_INCREASE_INTERVAL = 60_000;

From 056743b6ab7cb2fe7ed5777e34a0932b838357a5 Mon Sep 17 00:00:00 2001
From: DB
Date: Wed, 17 Sep 2025 17:35:57 +0100
Subject: [PATCH 19/43] feat: add unpublish command

---
 docs/CONTENT-ITEM.md                          |  54 ++++
 src/commands/content-item/import.ts           |   2 +-
 src/commands/content-item/unpublish.spec.ts   | 270 ++++++++++++++++++
 src/commands/content-item/unpublish.ts        | 266 +++++++++++++++++
 ...ontent-item-publishing-job-service.spec.ts |   4 +-
 .../content-item-publishing-service.spec.ts   |   6 +-
 .../content-item-unpublishing-service.spec.ts |  55 ++++
 .../content-item-unpublishing-service.ts      |  24 ++
 8 files changed, 673 insertions(+), 8 deletions(-)
 create mode 100644 src/commands/content-item/unpublish.spec.ts
 create mode 100644 src/commands/content-item/unpublish.ts
 create mode 100644 src/common/publishing/content-item-unpublishing-service.spec.ts
 create mode 100644 src/common/publishing/content-item-unpublishing-service.ts

diff --git a/docs/CONTENT-ITEM.md b/docs/CONTENT-ITEM.md
index 1baf6811..0229bbd3 100644
--- a/docs/CONTENT-ITEM.md
+++ b/docs/CONTENT-ITEM.md
@@ -28,6 +28,7 @@ Return to [README.md](../README.md) for information on other command categories.
 - [unarchive](#unarchive)
 - [tree](#tree)
 - [publish](#publish)
+ - [unpublish](#unpublish)
@@ -429,3 +430,56 @@ dc-cli content-item publish --repoId your-repo-id
 ```bash
 dc-cli content-item publish --facet "locale:en-GB,label:homepage"
 ```
+
+### unpublish
+
+Unpublishes content items from a content hub. You can unpublish all items or specify individual content items by ID.
+
+```bash
+dc-cli content-item unpublish [id]
+```
+
+If no `id` is provided, all content items in all content repositories in the specified hub will be unpublished.
+
+---
+
+#### Positionals
+
+| Argument | Description                                                                                                                       |
+| -------- | --------------------------------------------------------------------------------------------------------------------------- |
+| `id`     | The ID of a content item to be unpublished. If omitted, all content items in all repositories will be unpublished. _(Optional)_ |
+
+---
+
+#### Options
+
+| Option           | Alias | Description                                                                                                                                          |
+| ---------------- | ----- | -------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `--repoId`       |       | The ID of a content repository to restrict unpublishing scope. _(Optional)_                                                          |
+| `--folderId`     |       | The ID of a folder to restrict unpublishing scope. _(Optional)_                                                                  |
+| `--facet`        |       | Filter content using facets. Format: `label:example name,locale:en-GB`. Regex supported with `/pattern/`. See README for more examples.             |
+| `-f`, `--force`  |       | Skip confirmation prompts before unpublishing.                                                                                       |
+| `-s`, `--silent` |       | Disable log file creation.                                                                                                           |
+| `--logFile`      |       | Path to write the log file.
Default: `(log_filename)` | + +--- + +#### Examples + +##### Unpublish a specific content item by ID + +```bash +dc-cli content-item unpublish 1234abcd +``` + +##### Unpublish all content in a specific repository + +```bash +dc-cli content-item unpublish --repoId your-repo-id +``` + +##### Use facets to unpublish filtered content + +```bash +dc-cli content-item unpublish --facet "locale:en-GB,label:homepage" +``` diff --git a/src/commands/content-item/import.ts b/src/commands/content-item/import.ts index 9745ae58..fba4b6e6 100644 --- a/src/commands/content-item/import.ts +++ b/src/commands/content-item/import.ts @@ -914,7 +914,7 @@ const importTree = async ( ); if (argv.force || (await checkPublishJobs())) { - log.addComment(`Checking publshing jobs`); + log.addComment(`Checking publishing jobs`); const publishingJobService = new ContentItemPublishingJobService(client); const checkPublishProgress = progressBar(contentItemPublishJobs.length, 0, { title: 'Content items publishes complete' diff --git a/src/commands/content-item/unpublish.spec.ts b/src/commands/content-item/unpublish.spec.ts new file mode 100644 index 00000000..950f4f98 --- /dev/null +++ b/src/commands/content-item/unpublish.spec.ts @@ -0,0 +1,270 @@ +import { builder, handler, getContentItems, processItems, LOG_FILENAME, coerceLog } from './unpublish'; +import { Status, ContentItem, DynamicContent, Hub } from 'dc-management-sdk-js'; +import { FileLog } from '../../common/file-log'; +import { Arguments } from 'yargs'; +import { ConfigurationParameters } from '../configure'; +import PublishOptions from '../../common/publish/publish-options'; +import Yargs from 'yargs/yargs'; + +const mockUnpublish = jest.fn().mockImplementation((contentItems, fn) => { + fn(contentItems); +}); + +jest.mock('../../services/dynamic-content-client-factory'); +jest.mock('../../common/content-item/confirm-all-content'); +jest.mock('../../common/log-helpers'); +jest.mock('../../common/filter/fetch-content'); +jest.mock('readline'); +jest.mock('../../common/publishing/content-item-unpublishing-service', () => { + return { + ContentItemUnpublishingService: jest.fn().mockImplementation(() => { + return { + unpublish: mockUnpublish, + onIdle: jest.fn() + }; + }) + }; +}); + +const mockClient = { + contentItems: { + get: jest.fn() + }, + hubs: { + get: jest.fn() + } +} as unknown as DynamicContent; + +const mockLog = { + open: jest.fn().mockReturnValue({ + appendLine: jest.fn(), + addComment: jest.fn(), + close: jest.fn() + }) +} as unknown as FileLog; + +describe('unpublish tests', () => { + describe('builder tests', () => { + it('should configure yargs', function () { + const argv = Yargs(process.argv.slice(2)); + const spyPositional = jest.spyOn(argv, 'positional').mockReturnThis(); + const spyOption = jest.spyOn(argv, 'option').mockReturnThis(); + + builder(argv); + + expect(spyPositional).toHaveBeenCalledWith('id', { + type: 'string', + describe: + 'The ID of a content item to be unpublished. If id is not provided, this command will unpublish ALL content items through all content repositories in the hub.' 
+ }); + + expect(spyOption).toHaveBeenCalledWith('repoId', { + type: 'string', + describe: 'The ID of a content repository to search items in to be unpublished.', + requiresArg: false + }); + + expect(spyOption).toHaveBeenCalledWith('folderId', { + type: 'string', + describe: 'The ID of a folder to search items in to be unpublished.', + requiresArg: false + }); + + expect(spyOption).toHaveBeenCalledWith('facet', { + type: 'string', + describe: + "Unpublish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." + }); + + expect(spyOption).toHaveBeenCalledWith('f', { + type: 'boolean', + boolean: true, + describe: 'If present, there will be no confirmation prompt before unpublishing the found content.' + }); + + expect(spyOption).toHaveBeenCalledWith('s', { + type: 'boolean', + boolean: true, + describe: 'If present, no log file will be produced.' + }); + + expect(spyOption).toHaveBeenCalledWith('logFile', { + type: 'string', + default: LOG_FILENAME, + describe: 'Path to a log file to write to.', + coerce: coerceLog + }); + }); + }); + + describe('getContentItems tests', () => { + beforeEach(() => jest.clearAllMocks()); + + it('should return content items by id', async () => { + const mockItem = { id: '1', status: Status.ACTIVE } as ContentItem; + mockClient.contentItems.get = jest.fn().mockResolvedValue(mockItem); + + const result = await getContentItems({ + client: mockClient, + id: '1', + hubId: 'hub-id' + }); + + expect(result.contentItems).toEqual([mockItem]); + expect(result.missingContent).toBe(false); + }); + + it('should filter out non-active content items', async () => { + mockClient.contentItems.get = jest + .fn() + .mockResolvedValueOnce({ id: '1', status: Status.ARCHIVED }) + .mockResolvedValueOnce({ id: '2', status: Status.ACTIVE }); + + const result = await getContentItems({ + client: mockClient, + id: ['1', '2'], + hubId: 'hub-id' + }); + + expect(result.contentItems).toHaveLength(1); + expect(result.contentItems[0].id).toBe('2'); + expect(result.missingContent).toBe(true); + }); + + it('should return content using fallback filters', async () => { + const mockHub = {} as Hub; + const contentItems = [{ id: 'a', status: Status.ACTIVE }] as ContentItem[]; + const getContent = require('../../common/filter/fetch-content').getContent; + mockClient.hubs.get = jest.fn().mockResolvedValue(mockHub); + getContent.mockResolvedValue(contentItems); + + const result = await getContentItems({ + client: mockClient, + hubId: 'hub-id', + facet: 'label:test' + }); + + expect(result.contentItems).toEqual(contentItems); + }); + }); + + describe('processItems tests', () => { + beforeEach(() => { + jest.clearAllMocks(); + jest.mock('readline'); + }); + + it('should exit early if no content items', async () => { + console.log = jest.fn(); + + await processItems({ + contentItems: [], + logFile: mockLog, + allContent: false, + missingContent: false + }); + + expect(console.log).toHaveBeenCalledWith('Nothing found to unpublish, aborting.'); + }); + + it('should confirm before unpublishing when force is false', async () => { + const confirmAllContent = require('../../common/content-item/confirm-all-content').confirmAllContent; + confirmAllContent.mockResolvedValue(false); + console.log = jest.fn(); + + await processItems({ + contentItems: [new ContentItem({ id: '1', label: 'Test', body: { _meta: {} } })], + force: 
false, + silent: true, + logFile: mockLog, + allContent: false, + missingContent: false + }); + + expect(confirmAllContent).toHaveBeenCalled(); + }); + + it('should process all items and call unpublish', async () => { + const contentItem = new ContentItem({ + id: '1', + label: 'Unpublish Me', + body: { _meta: {} } + }); + + await processItems({ + contentItems: [contentItem], + force: true, + silent: true, + logFile: mockLog, + allContent: false, + missingContent: false + }); + + expect(mockUnpublish).toHaveBeenCalledTimes(1); + }); + + it('should process all items while filtering out any dependencies and call unpublish', async () => { + const contentItemWithDependency = new ContentItem({ + id: 'da2ee918-34c3-4fc1-ae05-111111111111', + label: 'Unpublish me', + body: { + _meta: {}, + dependency: { + _meta: { schema: 'http://bigcontent.io/cms/schema/v1/core#/definitions/content-link' }, + contentType: 'http://bigcontent.io/cms/schema/v1/text', + id: 'da2ee918-34c3-4fc1-ae05-222222222222' + } + } + }); + const contentItemDependency = new ContentItem({ + id: 'da2ee918-34c3-4fc1-ae05-222222222222', + label: 'No need to unpublish me', + body: { _meta: {} } + }); + + await processItems({ + contentItems: [contentItemWithDependency, contentItemDependency], + force: true, + silent: true, + logFile: mockLog, + allContent: false, + missingContent: false + }); + + expect(mockUnpublish).toHaveBeenCalledTimes(1); + }); + }); + + describe('handler tests', () => { + const clientFactory = require('../../services/dynamic-content-client-factory').default; + const getItemsSpy = jest.spyOn(require('./unpublish'), 'getContentItems'); + const processSpy = jest.spyOn(require('./unpublish'), 'processItems'); + beforeEach(() => { + jest.clearAllMocks(); + clientFactory.mockReturnValue(mockClient); + getItemsSpy.mockResolvedValue({ + contentItems: [{ id: '123', label: 'Test', status: Status.ACTIVE }], + missingContent: false + }); + processSpy.mockResolvedValue(undefined); + }); + it('should warn when both id and facet are provided', async () => { + console.log = jest.fn(); + await handler({ + id: '1', + facet: 'label:test', + hubId: 'hub-id', + logFile: mockLog + } as Arguments); + expect(console.log).toHaveBeenCalledWith('Please specify either a facet or an ID - not both.'); + }); + it('should process items with valid inputs', async () => { + await handler({ + hubId: 'hub-id', + logFile: mockLog + } as Arguments); + expect(getItemsSpy).toHaveBeenCalled(); + expect(processSpy).toHaveBeenCalled(); + }); + }); +}); diff --git a/src/commands/content-item/unpublish.ts b/src/commands/content-item/unpublish.ts new file mode 100644 index 00000000..3d28607f --- /dev/null +++ b/src/commands/content-item/unpublish.ts @@ -0,0 +1,266 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { Arguments, Argv } from 'yargs'; +import { ConfigurationParameters } from '../configure'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { confirmAllContent } from '../../common/content-item/confirm-all-content'; +import PublishOptions from '../../common/publish/publish-options'; +import { + ContentItem, + ContentItemPublishingStatus, + ContentRepository, + DynamicContent, + Status +} from 'dc-management-sdk-js'; +import { getDefaultLogPath, createLog } from '../../common/log-helpers'; +import { FileLog } from '../../common/file-log'; +import { withOldFilters } from '../../common/filter/facet'; +import { getContent } from '../../common/filter/fetch-content'; +import { 
ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; +import { ContentMapping } from '../../common/content-mapping'; +import { progressBar } from '../../common/progress-bar/progress-bar'; +import { ContentItemUnpublishingService } from '../../common/publishing/content-item-unpublishing-service'; + +export const command = 'unpublish [id]'; + +export const desc = 'Unublish Content Items'; + +export const LOG_FILENAME = (platform: string = process.platform): string => + getDefaultLogPath('content-item', 'unpublish', platform); + +export const coerceLog = (logFile: string): FileLog => createLog(logFile, 'Content Items Unpublish Log'); + +export const builder = (yargs: Argv): void => { + yargs + .positional('id', { + type: 'string', + describe: + 'The ID of a content item to be unpublished. If id is not provided, this command will unpublish ALL content items through all content repositories in the hub.' + }) + .option('repoId', { + type: 'string', + describe: 'The ID of a content repository to search items in to be unpublished.', + requiresArg: false + }) + .option('folderId', { + type: 'string', + describe: 'The ID of a folder to search items in to be unpublished.', + requiresArg: false + }) + .option('facet', { + type: 'string', + describe: + "Unpublish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." + }) + .alias('f', 'force') + .option('f', { + type: 'boolean', + boolean: true, + describe: 'If present, there will be no confirmation prompt before unpublishing the found content.' + }) + .alias('s', 'silent') + .option('s', { + type: 'boolean', + boolean: true, + describe: 'If present, no log file will be produced.' + }) + .option('logFile', { + type: 'string', + default: LOG_FILENAME, + describe: 'Path to a log file to write to.', + coerce: coerceLog + }) + .option('name', { + type: 'string', + hidden: true + }); +}; + +export const getContentItems = async ({ + client, + id, + hubId, + repoId, + folderId, + facet +}: { + client: DynamicContent; + id?: string | string[]; + hubId: string; + repoId?: string | string[]; + folderId?: string | string[]; + facet?: string; +}): Promise<{ contentItems: ContentItem[]; missingContent: boolean }> => { + try { + let contentItems: ContentItem[] = []; + + if (id != null) { + const itemIds = Array.isArray(id) ? id : [id]; + const items: ContentItem[] = []; + + for (const id of itemIds) { + try { + items.push(await client.contentItems.get(id)); + } catch { + // Missing item. 
+ } + } + + contentItems.push(...items.filter(item => item.status === Status.ACTIVE)); + + return { + contentItems, + missingContent: contentItems.length != itemIds.length + }; + } + + const hub = await client.hubs.get(hubId); + + contentItems = await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); + + return { contentItems, missingContent: false }; + } catch (err) { + console.log(err); + + return { + contentItems: [], + missingContent: false + }; + } +}; + +export const processItems = async ({ + contentItems, + force, + silent, + logFile, + allContent, + missingContent +}: { + contentItems: ContentItem[]; + force?: boolean; + silent?: boolean; + logFile: FileLog; + allContent: boolean; + missingContent: boolean; +}): Promise => { + if (contentItems.length == 0) { + console.log('Nothing found to unpublish, aborting.'); + return; + } + + const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); + const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); + let unpublishChildren = 0; + const rootContentItems = contentTree.all + .filter(node => { + let isTopLevel = true; + + contentTree.traverseDependants( + node, + dependant => { + if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) { + isTopLevel = false; + } + }, + true + ); + + if (!isTopLevel) { + unpublishChildren++; + } + + return isTopLevel; + }) + .map(node => node.owner.content); + + const rootContentPublishedItems = rootContentItems.filter( + item => item.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED + ); + + const log = logFile.open(); + log.appendLine( + `Found ${rootContentPublishedItems.length} items to unpublish. (${unpublishChildren} children included)` + ); + + if (rootContentPublishedItems.length === 0) { + return; + } + + if (!force) { + const yes = await confirmAllContent('unpublish', 'content items', allContent, missingContent); + if (!yes) { + return; + } + } + + log.appendLine(`Unpublishing ${rootContentPublishedItems.length} items.`); + + const unpublishingService = new ContentItemUnpublishingService(); + const contentItemUnpublishJobs: ContentItem[] = []; + const unpublishProgress = progressBar(rootContentPublishedItems.length, 0, { title: 'Unpublishing content items' }); + + for (const item of rootContentPublishedItems) { + try { + await unpublishingService.unpublish(item, contentItem => { + contentItemUnpublishJobs.push(contentItem); + + log.addComment(`Initiated unpublish for "${item.label}"`); + unpublishProgress.increment(); + }); + } catch (e) { + log.appendLine(`\nFailed to initiate unpublish for ${item.label}: ${e.toString()}`); + unpublishProgress.increment(); + } + } + + await unpublishingService.onIdle(); + unpublishProgress.stop(); + + log.appendLine(`The request for content item/s to be unpublished has been completed - please manually verify.`); + + await log.close(!silent); +}; + +export const handler = async (argv: Arguments): Promise => { + const { id, logFile, force, silent, hubId, repoId, folderId } = argv; + const client = dynamicContentClientFactory(argv); + + const facet = withOldFilters(argv.facet, argv); + + const allContent = !id && !facet && !folderId && !repoId; + + if (repoId && id) { + console.log('ID of content item is specified, ignoring repository ID'); + } + + if (id && facet) { + console.log('Please specify either a facet or an ID - not both.'); + return; + } + + if (repoId && folderId) { + console.log('Folder is 
specified, ignoring repository ID'); + } + + if (allContent) { + console.log('No filter was given, unpublishing all content'); + } + + const { contentItems, missingContent } = await getContentItems({ + client, + id, + hubId, + repoId, + folderId, + facet + }); + + await processItems({ + contentItems, + force, + silent, + logFile, + allContent, + missingContent + }); +}; diff --git a/src/common/publishing/content-item-publishing-job-service.spec.ts b/src/common/publishing/content-item-publishing-job-service.spec.ts index 95a5f760..9c4503d9 100644 --- a/src/common/publishing/content-item-publishing-job-service.spec.ts +++ b/src/common/publishing/content-item-publishing-job-service.spec.ts @@ -5,9 +5,7 @@ import { ContentItemPublishingJobService } from './content-item-publishing-job-s jest.mock('../burstable-queue/burstable-queue', () => ({ BurstableQueue: jest.fn().mockImplementation(() => ({ add: (fn: () => Promise) => fn(), - onIdle: async () => Promise.resolve(), - size: () => 0, - pending: () => 0 + onIdle: async () => Promise.resolve() })) })); diff --git a/src/common/publishing/content-item-publishing-service.spec.ts b/src/common/publishing/content-item-publishing-service.spec.ts index 8ddc8138..5b9060a9 100644 --- a/src/common/publishing/content-item-publishing-service.spec.ts +++ b/src/common/publishing/content-item-publishing-service.spec.ts @@ -4,9 +4,7 @@ import { ContentItemPublishingService } from './content-item-publishing-service' jest.mock('../burstable-queue/burstable-queue', () => ({ BurstableQueue: jest.fn().mockImplementation(() => ({ add: (fn: () => Promise) => fn(), - onIdle: async () => Promise.resolve(), - size: () => 0, - pending: () => 0 + onIdle: async () => Promise.resolve() })) })); @@ -30,7 +28,7 @@ describe('ContentItemPublishingService', () => { item2 = createMockContentItem('item-2', 'job-2'); }); - it('publishes an item and records the job', async () => { + it('publishes a content item and stores the job', async () => { const cb = jest.fn(); await service.publish(item1, cb); diff --git a/src/common/publishing/content-item-unpublishing-service.spec.ts b/src/common/publishing/content-item-unpublishing-service.spec.ts new file mode 100644 index 00000000..359b060a --- /dev/null +++ b/src/common/publishing/content-item-unpublishing-service.spec.ts @@ -0,0 +1,55 @@ +import { ContentItem, ContentItemPublishingStatus } from 'dc-management-sdk-js'; +import { ContentItemUnpublishingService } from './content-item-unpublishing-service'; + +jest.mock('../burstable-queue/burstable-queue', () => { + return { + BurstableQueue: jest.fn().mockImplementation(() => ({ + add: (fn: () => Promise) => fn(), + onIdle: jest.fn().mockResolvedValue(undefined) + })) + }; +}); + +const createMockContentItem = ( + id: string, + status: ContentItemPublishingStatus = ContentItemPublishingStatus.LATEST +): ContentItem => { + return { + id, + publishingStatus: status, + related: { + unpublish: jest.fn().mockImplementationOnce(() => Promise.resolve()) + } + } as unknown as ContentItem; +}; + +describe('ContentItemUnpublishingService', () => { + let service: ContentItemUnpublishingService; + + beforeEach(() => { + service = new ContentItemUnpublishingService(); + }); + + it('unpublishes a published content item', async () => { + const item = createMockContentItem('item-1'); + const action = jest.fn(); + + await service.unpublish(item, action); + await service.onIdle(); + + expect(item.related.unpublish).toHaveBeenCalledTimes(1); + expect(action).toHaveBeenCalledWith(expect.objectContaining({ 
id: 'item-1' })); + expect(action).toHaveBeenCalledTimes(1); + }); + + it('does not unpublish if content item is already unpublished', async () => { + const item = createMockContentItem('item-2', ContentItemPublishingStatus.UNPUBLISHED); + const action = jest.fn(); + + await service.unpublish(item, action); + await service.onIdle(); + + expect(item.related.unpublish).not.toHaveBeenCalled(); + expect(action).not.toHaveBeenCalled(); + }); +}); diff --git a/src/common/publishing/content-item-unpublishing-service.ts b/src/common/publishing/content-item-unpublishing-service.ts new file mode 100644 index 00000000..26f6281c --- /dev/null +++ b/src/common/publishing/content-item-unpublishing-service.ts @@ -0,0 +1,24 @@ +import { ContentItem, ContentItemPublishingStatus } from 'dc-management-sdk-js'; +import { BurstableQueue } from '../burstable-queue/burstable-queue'; + +export class ContentItemUnpublishingService { + private queue; + + constructor() { + this.queue = new BurstableQueue({}); + } + + async unpublish(contentItem: ContentItem, action: (contentItem: ContentItem) => void) { + this.queue.add(async () => { + if (contentItem.publishingStatus === ContentItemPublishingStatus.UNPUBLISHED) { + return; + } + await contentItem.related.unpublish(); + action(contentItem); + }); + } + + async onIdle() { + return this.queue.onIdle(); + } +} From 149b81285e408f8097bbd1296fe7f9d55da7c8d3 Mon Sep 17 00:00:00 2001 From: DB Date: Tue, 23 Sep 2025 12:20:21 +0100 Subject: [PATCH 20/43] fix: ensure action gets called --- .../publishing/content-item-unpublishing-service.spec.ts | 2 +- src/common/publishing/content-item-unpublishing-service.ts | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/common/publishing/content-item-unpublishing-service.spec.ts b/src/common/publishing/content-item-unpublishing-service.spec.ts index 359b060a..b3ab58f8 100644 --- a/src/common/publishing/content-item-unpublishing-service.spec.ts +++ b/src/common/publishing/content-item-unpublishing-service.spec.ts @@ -50,6 +50,6 @@ describe('ContentItemUnpublishingService', () => { await service.onIdle(); expect(item.related.unpublish).not.toHaveBeenCalled(); - expect(action).not.toHaveBeenCalled(); + expect(action).toHaveBeenCalled(); }); }); diff --git a/src/common/publishing/content-item-unpublishing-service.ts b/src/common/publishing/content-item-unpublishing-service.ts index 26f6281c..0a2de2bf 100644 --- a/src/common/publishing/content-item-unpublishing-service.ts +++ b/src/common/publishing/content-item-unpublishing-service.ts @@ -10,10 +10,10 @@ export class ContentItemUnpublishingService { async unpublish(contentItem: ContentItem, action: (contentItem: ContentItem) => void) { this.queue.add(async () => { - if (contentItem.publishingStatus === ContentItemPublishingStatus.UNPUBLISHED) { - return; + if (contentItem.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED) { + await contentItem.related.unpublish(); } - await contentItem.related.unpublish(); + action(contentItem); }); } From d5c6104cdb0be3fb698233b247c08b99b6472952 Mon Sep 17 00:00:00 2001 From: DB Date: Tue, 23 Sep 2025 15:37:27 +0100 Subject: [PATCH 21/43] refactor: unpublish method, add tests to cover all statuses --- .../content-item-unpublishing-service.spec.ts | 36 ++++++++++++++++--- .../content-item-unpublishing-service.ts | 4 ++- 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/src/common/publishing/content-item-unpublishing-service.spec.ts b/src/common/publishing/content-item-unpublishing-service.spec.ts index 
b3ab58f8..30fa6777 100644 --- a/src/common/publishing/content-item-unpublishing-service.spec.ts +++ b/src/common/publishing/content-item-unpublishing-service.spec.ts @@ -30,20 +30,46 @@ describe('ContentItemUnpublishingService', () => { service = new ContentItemUnpublishingService(); }); - it('unpublishes a published content item', async () => { - const item = createMockContentItem('item-1'); + it('unpublishes a content item that has a status of LATEST', async () => { + const item = createMockContentItem('item-latest', ContentItemPublishingStatus.LATEST); + const action = jest.fn(); + + await service.unpublish(item, action); + await service.onIdle(); + + expect(item.related.unpublish).toHaveBeenCalled(); + expect(action).toHaveBeenCalled(); + + expect(item.related.unpublish).toHaveBeenCalledTimes(1); + expect(action).toHaveBeenCalledWith(expect.objectContaining({ id: 'item-latest' })); + expect(action).toHaveBeenCalledTimes(1); + }); + + it('unpublishes a content item that has a status of EARLY', async () => { + const item = createMockContentItem('item-early', ContentItemPublishingStatus.EARLY); const action = jest.fn(); await service.unpublish(item, action); await service.onIdle(); expect(item.related.unpublish).toHaveBeenCalledTimes(1); - expect(action).toHaveBeenCalledWith(expect.objectContaining({ id: 'item-1' })); + expect(action).toHaveBeenCalledWith(expect.objectContaining({ id: 'item-early' })); expect(action).toHaveBeenCalledTimes(1); }); - it('does not unpublish if content item is already unpublished', async () => { - const item = createMockContentItem('item-2', ContentItemPublishingStatus.UNPUBLISHED); + it('does not unpublish if content item has a status of NONE', async () => { + const item = createMockContentItem('item-none', ContentItemPublishingStatus.NONE); + const action = jest.fn(); + + await service.unpublish(item, action); + await service.onIdle(); + + expect(item.related.unpublish).not.toHaveBeenCalled(); + expect(action).toHaveBeenCalled(); + }); + + it('does not unpublish if content item has a status of UNPUBLISHED', async () => { + const item = createMockContentItem('item-unpublished', ContentItemPublishingStatus.UNPUBLISHED); const action = jest.fn(); await service.unpublish(item, action); diff --git a/src/common/publishing/content-item-unpublishing-service.ts b/src/common/publishing/content-item-unpublishing-service.ts index 0a2de2bf..8ca65a56 100644 --- a/src/common/publishing/content-item-unpublishing-service.ts +++ b/src/common/publishing/content-item-unpublishing-service.ts @@ -9,8 +9,10 @@ export class ContentItemUnpublishingService { } async unpublish(contentItem: ContentItem, action: (contentItem: ContentItem) => void) { + const canUnpublish = (state: ContentItemPublishingStatus | undefined) => + state && [ContentItemPublishingStatus.LATEST, ContentItemPublishingStatus.EARLY].includes(state); this.queue.add(async () => { - if (contentItem.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED) { + if (canUnpublish(contentItem.publishingStatus)) { await contentItem.related.unpublish(); } From 98ccb2d33e56707fb3991bdfa5def17c2d59778d Mon Sep 17 00:00:00 2001 From: DB Date: Fri, 26 Sep 2025 14:15:11 +0100 Subject: [PATCH 22/43] fix: prevent unpublish for item with status NONE --- src/commands/content-item/unpublish.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/commands/content-item/unpublish.ts b/src/commands/content-item/unpublish.ts index 3d28607f..67326277 100644 --- a/src/commands/content-item/unpublish.ts +++ 
b/src/commands/content-item/unpublish.ts @@ -174,7 +174,9 @@ export const processItems = async ({ .map(node => node.owner.content); const rootContentPublishedItems = rootContentItems.filter( - item => item.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED + item => + item.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED && + item.publishingStatus !== ContentItemPublishingStatus.NONE ); const log = logFile.open(); From b9a17b90bf15c5c9e358b35cd7075bd62e0204fe Mon Sep 17 00:00:00 2001 From: DB Date: Mon, 29 Sep 2025 12:51:24 +0100 Subject: [PATCH 23/43] fix: remove unpublish children logic --- src/commands/content-item/unpublish.spec.ts | 19 ++++------ src/commands/content-item/unpublish.ts | 41 ++------------------- 2 files changed, 11 insertions(+), 49 deletions(-) diff --git a/src/commands/content-item/unpublish.spec.ts b/src/commands/content-item/unpublish.spec.ts index 950f4f98..b5189a7f 100644 --- a/src/commands/content-item/unpublish.spec.ts +++ b/src/commands/content-item/unpublish.spec.ts @@ -204,26 +204,23 @@ describe('unpublish tests', () => { }); it('should process all items while filtering out any dependencies and call unpublish', async () => { + const contentItemDependency = new ContentItem({ + id: 'da2ee918-34c3-4fc1-ae05-222222222222', + label: 'No need to unpublish me', + body: { _meta: {} } + }); + const contentItemWithDependency = new ContentItem({ id: 'da2ee918-34c3-4fc1-ae05-111111111111', label: 'Unpublish me', body: { _meta: {}, - dependency: { - _meta: { schema: 'http://bigcontent.io/cms/schema/v1/core#/definitions/content-link' }, - contentType: 'http://bigcontent.io/cms/schema/v1/text', - id: 'da2ee918-34c3-4fc1-ae05-222222222222' - } + dependency: contentItemDependency } }); - const contentItemDependency = new ContentItem({ - id: 'da2ee918-34c3-4fc1-ae05-222222222222', - label: 'No need to unpublish me', - body: { _meta: {} } - }); await processItems({ - contentItems: [contentItemWithDependency, contentItemDependency], + contentItems: [contentItemWithDependency], force: true, silent: true, logFile: mockLog, diff --git a/src/commands/content-item/unpublish.ts b/src/commands/content-item/unpublish.ts index 67326277..35824bd8 100644 --- a/src/commands/content-item/unpublish.ts +++ b/src/commands/content-item/unpublish.ts @@ -4,19 +4,11 @@ import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import PublishOptions from '../../common/publish/publish-options'; -import { - ContentItem, - ContentItemPublishingStatus, - ContentRepository, - DynamicContent, - Status -} from 'dc-management-sdk-js'; +import { ContentItem, ContentItemPublishingStatus, DynamicContent, Status } from 'dc-management-sdk-js'; import { getDefaultLogPath, createLog } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import { withOldFilters } from '../../common/filter/facet'; import { getContent } from '../../common/filter/fetch-content'; -import { ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; -import { ContentMapping } from '../../common/content-mapping'; import { progressBar } from '../../common/progress-bar/progress-bar'; import { ContentItemUnpublishingService } from '../../common/publishing/content-item-unpublishing-service'; @@ -148,41 +140,14 @@ export const processItems = async ({ return; } - const repoContentItems = 
contentItems.map(content => ({ repo: new ContentRepository(), content })); - const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); - let unpublishChildren = 0; - const rootContentItems = contentTree.all - .filter(node => { - let isTopLevel = true; - - contentTree.traverseDependants( - node, - dependant => { - if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) { - isTopLevel = false; - } - }, - true - ); - - if (!isTopLevel) { - unpublishChildren++; - } - - return isTopLevel; - }) - .map(node => node.owner.content); - - const rootContentPublishedItems = rootContentItems.filter( + const rootContentPublishedItems = contentItems.filter( item => item.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED && item.publishingStatus !== ContentItemPublishingStatus.NONE ); const log = logFile.open(); - log.appendLine( - `Found ${rootContentPublishedItems.length} items to unpublish. (${unpublishChildren} children included)` - ); + log.appendLine(`Found ${rootContentPublishedItems.length} items to unpublish.`); if (rootContentPublishedItems.length === 0) { return; From 28f53114be75387c63851febe57beb9026346950 Mon Sep 17 00:00:00 2001 From: DB Date: Tue, 30 Sep 2025 13:05:22 +0100 Subject: [PATCH 24/43] fix: log immediate children --- src/commands/content-item/publish.spec.ts | 19 ++++++-------- src/commands/content-item/publish.ts | 32 +++++++---------------- 2 files changed, 18 insertions(+), 33 deletions(-) diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index d24ec78f..7864a4b1 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -222,29 +222,26 @@ describe('publish tests', () => { }); it('should process all items while filtering out any dependencies and call publish', async () => { + const contentItemDependency = new ContentItem({ + id: 'da2ee918-34c3-4fc1-ae05-222222222222', + label: 'No need to publish me', + body: { _meta: {} } + }); + const contentItemWithDependency = new ContentItem({ id: 'da2ee918-34c3-4fc1-ae05-111111111111', label: 'Publish me', body: { _meta: {}, - dependency: { - _meta: { schema: 'http://bigcontent.io/cms/schema/v1/core#/definitions/content-link' }, - contentType: 'http://bigcontent.io/cms/schema/v1/text', - id: 'da2ee918-34c3-4fc1-ae05-222222222222' - } + dependency: contentItemDependency } }); - const contentItemDependency = new ContentItem({ - id: 'da2ee918-34c3-4fc1-ae05-222222222222', - label: 'No need to publish me', - body: { _meta: {} } - }); // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['Y']); await processItems({ - contentItems: [contentItemWithDependency, contentItemDependency], + contentItems: [contentItemWithDependency], force: true, silent: true, logFile: mockLog, diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index 2a7c4e12..ef5f328a 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -149,31 +149,19 @@ export const processItems = async ({ const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); + let publishChildren = 0; - const rootContentItems = contentTree.all - .filter(node => { - let isTopLevel = true; - - contentTree.traverseDependants( - node, - dependant => { - if (dependant != node && 
contentTree.all.findIndex(entry => entry === dependant) !== -1) { - isTopLevel = false; - } - }, - true - ); - - if (!isTopLevel) { - publishChildren++; - } - return isTopLevel; - }) - .map(node => node.owner.content); + const rootContentItems = contentTree.all.map(node => { + publishChildren += node?.dependancies?.length || 0; + + return node.owner.content; + }); const log = logFile.open(); - log.appendLine(`Found ${rootContentItems.length} items to publish. (${publishChildren} children included)`); + log.appendLine( + `Found ${rootContentItems.length} ${rootContentItems.length > 1 ? 'items' : 'item'} to publish. (${publishChildren} ${publishChildren > 1 ? 'children' : 'child'} included)` + ); if (!force) { const yes = await confirmAllContent('publish', 'content items', allContent, missingContent); @@ -182,7 +170,7 @@ export const processItems = async ({ } } - log.appendLine(`Publishing ${rootContentItems.length} items.`); + log.appendLine(`Publishing ${rootContentItems.length} item/s ${publishChildren > 0 && '(including any children)'}.`); const publishingService = new ContentItemPublishingService(); const contentItemPublishJobs: [ContentItem, PublishingJob][] = []; From 4fe256a277114ff30454497e9f91b5c444cab68e Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Mon, 6 Oct 2025 12:18:03 +0100 Subject: [PATCH 25/43] fix: reintroduce publish content tree child item filtering --- src/commands/content-item/publish.spec.ts | 19 ++++++++------ src/commands/content-item/publish.ts | 31 +++++++++++++++-------- 2 files changed, 32 insertions(+), 18 deletions(-) diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index 7864a4b1..d24ec78f 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -222,26 +222,29 @@ describe('publish tests', () => { }); it('should process all items while filtering out any dependencies and call publish', async () => { - const contentItemDependency = new ContentItem({ - id: 'da2ee918-34c3-4fc1-ae05-222222222222', - label: 'No need to publish me', - body: { _meta: {} } - }); - const contentItemWithDependency = new ContentItem({ id: 'da2ee918-34c3-4fc1-ae05-111111111111', label: 'Publish me', body: { _meta: {}, - dependency: contentItemDependency + dependency: { + _meta: { schema: 'http://bigcontent.io/cms/schema/v1/core#/definitions/content-link' }, + contentType: 'http://bigcontent.io/cms/schema/v1/text', + id: 'da2ee918-34c3-4fc1-ae05-222222222222' + } } }); + const contentItemDependency = new ContentItem({ + id: 'da2ee918-34c3-4fc1-ae05-222222222222', + label: 'No need to publish me', + body: { _meta: {} } + }); // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['Y']); await processItems({ - contentItems: [contentItemWithDependency], + contentItems: [contentItemWithDependency, contentItemDependency], force: true, silent: true, logFile: mockLog, diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index ef5f328a..526b7386 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -149,18 +149,29 @@ export const processItems = async ({ const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); - - let publishChildren = 0; - - const rootContentItems = contentTree.all.map(node => { - publishChildren += node?.dependancies?.length || 0; - - 
return node.owner.content; - }); + let childCount = 0; + const rootContentItems = contentTree.all + .filter(node => { + let isTopLevel = true; + + contentTree.traverseDependants( + node, + dependant => { + if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) { + isTopLevel = false; + childCount++; + } + }, + true + ); + + return isTopLevel; + }) + .map(node => node.owner.content); const log = logFile.open(); log.appendLine( - `Found ${rootContentItems.length} ${rootContentItems.length > 1 ? 'items' : 'item'} to publish. (${publishChildren} ${publishChildren > 1 ? 'children' : 'child'} included)` + `Found ${rootContentItems.length} item(s) to publish (ignoring ${childCount} duplicate child item(s)).` ); if (!force) { @@ -170,7 +181,7 @@ export const processItems = async ({ } } - log.appendLine(`Publishing ${rootContentItems.length} item/s ${publishChildren > 0 && '(including any children)'}.`); + log.appendLine(`Publishing ${rootContentItems.length} item(s).`); const publishingService = new ContentItemPublishingService(); const contentItemPublishJobs: [ContentItem, PublishingJob][] = []; From 11cb2980f7b84f816401411e3ecd77dbd316c5a2 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Tue, 7 Oct 2025 17:30:15 +0100 Subject: [PATCH 26/43] feat: adding commands for content sync jobs - wip --- src/commands/content-item/sync.service.ts | 52 ++++++ src/commands/content-item/sync.ts | 208 ++++++++++++++++++++++ src/commands/job.ts | 20 +++ src/commands/job/get.ts | 32 ++++ src/commands/job/list.ts | 38 ++++ 5 files changed, 350 insertions(+) create mode 100644 src/commands/content-item/sync.service.ts create mode 100644 src/commands/content-item/sync.ts create mode 100644 src/commands/job.ts create mode 100644 src/commands/job/get.ts create mode 100644 src/commands/job/list.ts diff --git a/src/commands/content-item/sync.service.ts b/src/commands/content-item/sync.service.ts new file mode 100644 index 00000000..884c83b3 --- /dev/null +++ b/src/commands/content-item/sync.service.ts @@ -0,0 +1,52 @@ +import { ContentItem, Hub, Job } from 'dc-management-sdk-js'; +import { BurstableQueue } from '../../common/burstable-queue/burstable-queue'; +import { setTimeout } from 'node:timers/promises'; + +const DELAY = 200; + +export class ContentItemSyncService { + private queue; + private _failedJobs: Job[] = []; + + constructor() { + this.queue = new BurstableQueue({ concurrency: 1 }); + } + + sync(destinationHubId: string, hub: Hub, contentItem: ContentItem, action: (job: Job) => void): void { + this.queue.add(async () => { + const createSyncJob = await hub.related.jobs.createDeepSyncJob( + new Job({ + label: `dc-cli content item: ${contentItem.label}`, + ignoreSchemaValidation: true, + destinationHubId, + input: { rootContentItemIds: [contentItem.id] } + }) + ); + + const completedJob = await this.waitForJobCompletion(createSyncJob.jobId, hub); + + if (completedJob.status === 'FAILED') { + this._failedJobs.push(completedJob); + } + + action(completedJob); + }); + } + + private async waitForJobCompletion(jobId: string, hub: Hub): Promise { + let syncJob = await hub.related.jobs.get(jobId); + while (syncJob.status === 'CREATED' || syncJob.status === 'IN_PROGRESS') { + await setTimeout(DELAY); + syncJob = await hub.related.jobs.get(syncJob.id); + } + return syncJob; + } + + async onIdle(): Promise { + return this.queue.onIdle(); + } + + get failedJobs(): Job[] { + return this._failedJobs; + } +} diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts 
new file mode 100644 index 00000000..3a4f1e30 --- /dev/null +++ b/src/commands/content-item/sync.ts @@ -0,0 +1,208 @@ +import { Arguments, Argv } from 'yargs'; +import { ConfigurationParameters } from '../configure'; +import { createLog, getDefaultLogPath } from '../../common/log-helpers'; +import { FileLog } from '../../common/file-log'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { withOldFilters } from '../../common/filter/facet'; +import { ContentItem, ContentRepository, DynamicContent, Hub, Job, Status } from 'dc-management-sdk-js'; +import { getContent } from '../../common/filter/fetch-content'; +import { ContentMapping } from '../../common/content-mapping'; +import { ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; +import { confirmAllContent } from '../../common/content-item/confirm-all-content'; +import { progressBar } from '../../common/progress-bar/progress-bar'; +import { ContentItemSyncService } from './sync.service'; + +export const LOG_FILENAME = (platform: string = process.platform): string => + getDefaultLogPath('content-item', 'sync', platform); + +export const coerceLog = (logFile: string): FileLog => createLog(logFile, 'Content Items Sync Log'); + +export const command = 'sync [id]'; + +export const desc = 'Sync Content Items'; + +export const builder = (yargs: Argv): void => { + yargs + .positional('id', { + type: 'string', + describe: `The ID of a content item to sync. If id is not provided, this command will sync ALL content items through all content repositories in the hub.` + }) + .option('repoId', { + type: 'string', + describe: 'The ID of a content repository to search items in to be sync.', + requiresArg: false + }) + .option('folderId', { + type: 'string', + describe: 'The ID of a folder to search items in to be sync.', + requiresArg: false + }) + .option('facet', { + type: 'string', + describe: + "Publish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." + }) + .alias('f', 'force') + .option('f', { + type: 'boolean', + boolean: true, + describe: 'If present, there will be no confirmation prompt before publishing the found content.' + }) + .alias('s', 'silent') + .option('s', { + type: 'boolean', + boolean: true, + describe: 'If present, no log file will be produced.' 
+ }) + .option('logFile', { + type: 'string', + default: LOG_FILENAME, + describe: 'Path to a log file to write to.', + coerce: coerceLog + }) + .option('destinationHubId', { + type: 'string', + describe: 'The ID of a destination hub to sync with.', + requiresArg: true, + demandOption: true + }); +}; + +export const fetchContentByIds = async (client: DynamicContent, ids: string[]) => { + const contentItems: ContentItem[] = []; + + for (const id of ids) { + try { + contentItems.push(await client.contentItems.get(id)); + } catch (e) { + throw new Error(`Missing content item with id: ${id}: ${e.message} `); + } + } + + return contentItems.filter(item => item.status === Status.ACTIVE); +}; + +export const listContent = async ( + client: DynamicContent, + hub: Hub, + { + repoId, + folderId, + facet, + status + }: { repoId?: string | string[]; folderId?: string | string[]; facet?: string; status?: Status } +) => { + return await getContent(client, hub, facet, { repoId, folderId, status: status || Status.ACTIVE, enrichItems: true }); +}; + +export const getRootContentItems = (contentItems: ContentItem[]) => { + const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); + const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); + const rootContentItems = contentTree.all + .filter(node => { + let isTopLevel = true; + + contentTree.traverseDependants( + node, + dependant => { + if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) { + isTopLevel = false; + } + }, + true + ); + + return isTopLevel; + }) + .map(node => node.owner.content); + + return rootContentItems; +}; + +export default interface SyncOptions { + id?: string | string[]; + repoId?: string | string[]; + folderId?: string | string[]; + facet?: string; + logFile: FileLog; + force?: boolean; + silent?: boolean; + destinationHubId: string; +} + +export const handler = async (argv: Arguments): Promise => { + const { id, logFile, force, silent, hubId, repoId, folderId, destinationHubId } = argv; + const client = dynamicContentClientFactory(argv); + + const facet = withOldFilters(argv.facet, argv); + + if (repoId && id) { + console.log('ID of content item is specified, ignoring repository ID'); + } + + if (id && facet) { + console.log('Please specify either a facet or an ID - not both'); + return; + } + + if (repoId && folderId) { + console.log('Folder is specified, ignoring repository ID'); + } + + const allContent = !id && !facet && !folderId && !repoId; + if (allContent) { + console.log('No filter was given, syncing all content'); + } + + const hub = await client.hubs.get(hubId); + + const contentItems = id + ? await fetchContentByIds(client, Array.isArray(id) ? 
id : [id]) + : await listContent(client, hub, { repoId, folderId, facet }); + + if (!contentItems.length) { + console.log('Nothing found to sync, aborting'); + } + + const rootContentItems = getRootContentItems(contentItems); + const log = logFile.open(); + + log.appendLine( + `Found ${rootContentItems.length} item(s) to sync (ignoring ${contentItems.length - rootContentItems.length} duplicate child item(s))` + ); + + if (!force) { + const yes = await confirmAllContent('sync', 'content items', allContent, false); + if (!yes) { + return; + } + } + + log.appendLine(`Syncing ${rootContentItems.length} item(s)`); + + const syncProgress = progressBar(rootContentItems.length, 0, { title: 'Syncing content items' }); + const syncService = new ContentItemSyncService(); + + rootContentItems.forEach(contentItem => { + log.addComment(`Requesting content item sync: ${contentItem.label}`); + syncService.sync(destinationHubId, hub, contentItem, (syncJob: Job) => { + syncProgress.increment(); + if (syncJob.status === 'FAILED') { + log.addComment(`Failed content item sync job ${syncJob.id}: ${JSON.stringify(syncJob.errors)}`); + return; + } + log.addComment(`Content item synced: ${contentItem.label} (jobId: ${syncJob.id}) ${JSON.stringify(syncJob)}`); + }); + }); + + await syncService.onIdle(); + syncProgress.stop(); + + const failedJobsMsg = syncService.failedJobs.length + ? `with ${syncService.failedJobs.length} failed jobs - check logs for details` + : ``; + + log.appendLine(`Sync complete ${failedJobsMsg}`); + + await log.close(!silent); +}; diff --git a/src/commands/job.ts b/src/commands/job.ts new file mode 100644 index 00000000..e33565b8 --- /dev/null +++ b/src/commands/job.ts @@ -0,0 +1,20 @@ +import { Argv } from 'yargs'; +import { readConfig } from '../cli'; +import YargsCommandBuilderOptions from '../common/yargs/yargs-command-builder-options'; +import { configureCommandOptions } from './configure'; + +export const command = 'job'; + +export const desc = 'Job'; + +export const builder = (yargs: Argv): Argv => + yargs + .commandDir('job', YargsCommandBuilderOptions) + .options(configureCommandOptions) + .config('config', readConfig) + .demandCommand() + .help(); + +export const handler = (): void => { + /* do nothing */ +}; diff --git a/src/commands/job/get.ts b/src/commands/job/get.ts new file mode 100644 index 00000000..3baa6e13 --- /dev/null +++ b/src/commands/job/get.ts @@ -0,0 +1,32 @@ +import { Arguments, Argv } from 'yargs'; +import DataPresenter, { RenderingArguments, RenderingOptions } from '../../view/data-presenter'; +import { BuilderOptions } from '../content-repository/assign-content-type'; +import { ConfigurationParameters } from '../configure'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { singleItemTableOptions } from '../../common/table/table.consts'; + +export const command = 'get '; + +export const desc = 'Get a Job by ID'; + +export const builder = (yargs: Argv): void => { + yargs + .positional('id', { + describe: 'Job ID', + type: 'string' + }) + .options(RenderingOptions); +}; + +export const handler = async ( + argv: Arguments +): Promise => { + const client = dynamicContentClientFactory(argv); + const hub = await client.hubs.get(argv.hubId); + const job = await hub.related.jobs.get(argv.id); + + new DataPresenter(job.toJSON()).render({ + json: argv.json, + tableUserConfig: singleItemTableOptions + }); +}; diff --git a/src/commands/job/list.ts b/src/commands/job/list.ts new file mode 100644 index 00000000..9682b6d6 --- 
/dev/null +++ b/src/commands/job/list.ts @@ -0,0 +1,38 @@ +import { Arguments, Argv } from 'yargs'; +import DataPresenter, { RenderingArguments, RenderingOptions } from '../../view/data-presenter'; +import { BuilderOptions } from '../content-repository/assign-content-type'; +import { ConfigurationParameters } from '../configure'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { extractSortable, PagingParameters } from '../../common/yargs/sorting-options'; +import paginator from '../../common/dc-management-sdk-js/paginator'; +import { Job } from 'dc-management-sdk-js'; + +export const command = 'list'; + +export const desc = 'List jobs'; + +export const builder = (yargs: Argv): void => { + yargs.options(RenderingOptions); +}; + +export const itemMapFn = ({ id, label, status, jobType, originHubId, destinationHubId }: Job): object => ({ + id, + label, + status, + jobType, + originHubId, + destinationHubId +}); + +export const handler = async ( + argv: Arguments +): Promise => { + const client = dynamicContentClientFactory(argv); + const hub = await client.hubs.get(argv.hubId); + const contentTypeList = await paginator(hub.related.jobs.list, extractSortable(argv)); + + new DataPresenter(contentTypeList.map(value => value.toJSON())).render({ + json: argv.json, + itemMapFn: itemMapFn + }); +}; From 1491b6eb91f9e37d70e2ae1b0f08d3fd16bcd467 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Wed, 8 Oct 2025 10:34:51 +0100 Subject: [PATCH 27/43] feat: making content item fetch reusable --- src/commands/content-item/sync.ts | 72 +++---------------- .../content-item/get-content-items-by-ids.ts | 15 ++++ .../content-item/get-root-content-items.ts | 27 +++++++ 3 files changed, 52 insertions(+), 62 deletions(-) create mode 100644 src/common/content-item/get-content-items-by-ids.ts create mode 100644 src/common/content-item/get-root-content-items.ts diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts index 3a4f1e30..98f5959f 100644 --- a/src/commands/content-item/sync.ts +++ b/src/commands/content-item/sync.ts @@ -4,13 +4,13 @@ import { createLog, getDefaultLogPath } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { withOldFilters } from '../../common/filter/facet'; -import { ContentItem, ContentRepository, DynamicContent, Hub, Job, Status } from 'dc-management-sdk-js'; +import { Job, Status } from 'dc-management-sdk-js'; import { getContent } from '../../common/filter/fetch-content'; -import { ContentMapping } from '../../common/content-mapping'; -import { ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import { progressBar } from '../../common/progress-bar/progress-bar'; import { ContentItemSyncService } from './sync.service'; +import { getRootContentItems } from '../../common/content-item/get-root-content-items'; +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; export const LOG_FILENAME = (platform: string = process.platform): string => getDefaultLogPath('content-item', 'sync', platform); @@ -68,57 +68,6 @@ export const builder = (yargs: Argv): void => { }); }; -export const fetchContentByIds = async (client: DynamicContent, ids: string[]) => { - const contentItems: ContentItem[] = []; - - for (const id of ids) { - try { - 
contentItems.push(await client.contentItems.get(id)); - } catch (e) { - throw new Error(`Missing content item with id: ${id}: ${e.message} `); - } - } - - return contentItems.filter(item => item.status === Status.ACTIVE); -}; - -export const listContent = async ( - client: DynamicContent, - hub: Hub, - { - repoId, - folderId, - facet, - status - }: { repoId?: string | string[]; folderId?: string | string[]; facet?: string; status?: Status } -) => { - return await getContent(client, hub, facet, { repoId, folderId, status: status || Status.ACTIVE, enrichItems: true }); -}; - -export const getRootContentItems = (contentItems: ContentItem[]) => { - const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); - const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); - const rootContentItems = contentTree.all - .filter(node => { - let isTopLevel = true; - - contentTree.traverseDependants( - node, - dependant => { - if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) { - isTopLevel = false; - } - }, - true - ); - - return isTopLevel; - }) - .map(node => node.owner.content); - - return rootContentItems; -}; - export default interface SyncOptions { id?: string | string[]; repoId?: string | string[]; @@ -157,8 +106,8 @@ export const handler = async (argv: Arguments { log.addComment(`Requesting content item sync: ${contentItem.label}`); syncService.sync(destinationHubId, hub, contentItem, (syncJob: Job) => { - syncProgress.increment(); + progress.increment(); if (syncJob.status === 'FAILED') { log.addComment(`Failed content item sync job ${syncJob.id}: ${JSON.stringify(syncJob.errors)}`); return; @@ -196,11 +145,10 @@ export const handler = async (argv: Arguments { + const contentItems: ContentItem[] = []; + + for (const id of ids) { + try { + contentItems.push(await client.contentItems.get(id)); + } catch (e) { + throw new Error(`Missing content item with id: ${id}: ${e.message} `); + } + } + + return contentItems; +}; diff --git a/src/common/content-item/get-root-content-items.ts b/src/common/content-item/get-root-content-items.ts new file mode 100644 index 00000000..707501bb --- /dev/null +++ b/src/common/content-item/get-root-content-items.ts @@ -0,0 +1,27 @@ +import { ContentItem, ContentRepository } from 'dc-management-sdk-js'; +import { ContentMapping } from '../content-mapping'; +import { ContentDependancyTree } from './content-dependancy-tree'; + +export const getRootContentItems = (contentItems: ContentItem[]) => { + const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); + const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); + const rootContentItems = contentTree.all + .filter(node => { + let isTopLevel = true; + + contentTree.traverseDependants( + node, + dependant => { + if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) { + isTopLevel = false; + } + }, + true + ); + + return isTopLevel; + }) + .map(node => node.owner.content); + + return rootContentItems; +}; From 0cf11b17d38dacb3fd0d10ee560fd2b037c21623 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Thu, 9 Oct 2025 11:25:57 +0100 Subject: [PATCH 28/43] feat: sync test coverage and minor fixes --- src/__snapshots__/cli.spec.ts.snap | 3 +- .../content-item/sync.service.spec.ts | 87 ++++++++ src/commands/content-item/sync.spec.ts | 207 ++++++++++++++++++ src/commands/content-item/sync.ts | 19 +- 
src/commands/job.spec.ts | 12 + src/commands/job/get.spec.ts | 69 ++++++ src/commands/job/get.ts | 5 +- src/commands/job/list.spec.ts | 62 ++++++ src/commands/job/list.ts | 10 +- .../get-content-items-by-ids.spec.ts | 51 +++++ .../content-item/get-content-items-by-ids.ts | 2 +- .../get-independent-content-items.spec.ts | 73 ++++++ ...ms.ts => get-independent-content-items.ts} | 6 +- 13 files changed, 586 insertions(+), 20 deletions(-) create mode 100644 src/commands/content-item/sync.service.spec.ts create mode 100644 src/commands/content-item/sync.spec.ts create mode 100644 src/commands/job.spec.ts create mode 100644 src/commands/job/get.spec.ts create mode 100644 src/commands/job/list.spec.ts create mode 100644 src/common/content-item/get-content-items-by-ids.spec.ts create mode 100644 src/common/content-item/get-independent-content-items.spec.ts rename src/common/content-item/{get-root-content-items.ts => get-independent-content-items.ts} (82%) diff --git a/src/__snapshots__/cli.spec.ts.snap b/src/__snapshots__/cli.spec.ts.snap index c1d8338c..e1a1de55 100644 --- a/src/__snapshots__/cli.spec.ts.snap +++ b/src/__snapshots__/cli.spec.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing exports[`cli should create a yarg instance if one is not supplied 1`] = ` "dc-cli @@ -12,6 +12,7 @@ Commands: dc-cli event Event dc-cli extension Extension dc-cli hub Hub + dc-cli job Job dc-cli search-index Search Index dc-cli settings Settings diff --git a/src/commands/content-item/sync.service.spec.ts b/src/commands/content-item/sync.service.spec.ts new file mode 100644 index 00000000..34b129a8 --- /dev/null +++ b/src/commands/content-item/sync.service.spec.ts @@ -0,0 +1,87 @@ +import { ContentItem, Hub, Job } from 'dc-management-sdk-js'; +import { ContentItemSyncService } from './sync.service'; + +const createMockHub = (id: string) => { + return { ...new Hub({ id }), ...{ related: { jobs: { createDeepSyncJob: jest.fn(), get: jest.fn() } } } }; +}; + +describe('sync.service', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + describe('ContentItemSyncService', () => { + describe('sync', () => { + it('should add a content item sync job to the queue and process the queue item', async () => { + const JOB_ID = '68e5289f0aba3024bde050f9'; + const DEST_HUB_ID = '67d2a201642fa239dbe1523d'; + const CONTENT_ITEM_ID = 'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + const contentItem = new ContentItem({ id: CONTENT_ITEM_ID, label: 'sync service test' }); + const hub = createMockHub(CONTENT_ITEM_ID); + + hub.related.jobs.createDeepSyncJob.mockResolvedValue(new Job({ jobId: JOB_ID })); + hub.related.jobs.get.mockResolvedValue(new Job({ id: JOB_ID, status: 'COMPLETED' })); + + const syncService = new ContentItemSyncService(); + syncService.sync(DEST_HUB_ID, hub as unknown as Hub, contentItem, () => {}); + await syncService.onIdle(); + + expect(hub.related.jobs.createDeepSyncJob).toHaveBeenCalledWith({ + label: `dc-cli content item: sync service test`, + ignoreSchemaValidation: true, + destinationHubId: DEST_HUB_ID, + input: { rootContentItemIds: [CONTENT_ITEM_ID] } + }); + expect(hub.related.jobs.get).toHaveBeenNthCalledWith(1, JOB_ID); + expect(syncService.failedJobs.length).toEqual(0); + }); + it('should add a content item sync job to the queue, process and wait for a completed job', async () => { + const JOB_ID = '68e5289f0aba3024bde050f9'; + const DEST_HUB_ID = '67d2a201642fa239dbe1523d'; + const CONTENT_ITEM_ID = 
'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + const contentItem = new ContentItem({ id: CONTENT_ITEM_ID, label: 'sync service test' }); + const hub = createMockHub(CONTENT_ITEM_ID); + + hub.related.jobs.createDeepSyncJob.mockResolvedValue(new Job({ jobId: JOB_ID })); + hub.related.jobs.get + .mockResolvedValueOnce(new Job({ id: JOB_ID, status: 'IN_PROGRESS' })) + .mockResolvedValueOnce(new Job({ id: JOB_ID, status: 'COMPLETED' })); + + const syncService = new ContentItemSyncService(); + syncService.sync(DEST_HUB_ID, hub as unknown as Hub, contentItem, () => {}); + await syncService.onIdle(); + + expect(hub.related.jobs.createDeepSyncJob).toHaveBeenCalledWith({ + label: `dc-cli content item: sync service test`, + ignoreSchemaValidation: true, + destinationHubId: DEST_HUB_ID, + input: { rootContentItemIds: [CONTENT_ITEM_ID] } + }); + expect(hub.related.jobs.get).toHaveBeenNthCalledWith(2, JOB_ID); + expect(syncService.failedJobs.length).toEqual(0); + }); + it('should add a content item sync job to the queue, process and store a failed job', async () => { + const JOB_ID = '68e5289f0aba3024bde050f9'; + const DEST_HUB_ID = '67d2a201642fa239dbe1523d'; + const CONTENT_ITEM_ID = 'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + const contentItem = new ContentItem({ id: CONTENT_ITEM_ID, label: 'sync service test' }); + const hub = createMockHub(CONTENT_ITEM_ID); + + hub.related.jobs.createDeepSyncJob.mockResolvedValue(new Job({ jobId: JOB_ID })); + hub.related.jobs.get.mockResolvedValueOnce(new Job({ id: JOB_ID, status: 'FAILED' })); + + const syncService = new ContentItemSyncService(); + syncService.sync(DEST_HUB_ID, hub as unknown as Hub, contentItem, () => {}); + await syncService.onIdle(); + + expect(hub.related.jobs.createDeepSyncJob).toHaveBeenCalledWith({ + label: `dc-cli content item: sync service test`, + ignoreSchemaValidation: true, + destinationHubId: DEST_HUB_ID, + input: { rootContentItemIds: [CONTENT_ITEM_ID] } + }); + expect(hub.related.jobs.get).toHaveBeenNthCalledWith(1, JOB_ID); + expect(syncService.failedJobs.length).toEqual(1); + }); + }); + }); +}); diff --git a/src/commands/content-item/sync.spec.ts b/src/commands/content-item/sync.spec.ts new file mode 100644 index 00000000..b0893c3d --- /dev/null +++ b/src/commands/content-item/sync.spec.ts @@ -0,0 +1,207 @@ +import Yargs from 'yargs/yargs'; +import readline from 'readline'; +import { builder, coerceLog, command, handler, LOG_FILENAME } from './sync'; +import { FileLog } from '../../common/file-log'; + +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { ContentItem, Hub, Job } from 'dc-management-sdk-js'; +import { getContent } from '../../common/filter/fetch-content'; + +jest.mock('readline'); + +const mockSync = jest.fn(); +const mockOnIdle = jest.fn(); +const mockFailedJobs = jest.fn(); +jest.mock('./sync.service', () => { + return { + ContentItemSyncService: jest.fn().mockImplementation(() => { + return { + sync: mockSync, + onIdle: mockOnIdle, + failedJobs: mockFailedJobs + }; + }) + }; +}); + +jest.mock('../../common/content-item/get-content-items-by-ids', () => { + return { + getContentByIds: jest.fn() + }; +}); +jest.mock('../../common/filter/fetch-content', () => { + return { + getContent: jest.fn() + }; +}); +jest.mock('../../services/dynamic-content-client-factory'); + +describe('content-item sync', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should command should 
defined', function () { + expect(command).toEqual('sync [id]'); + }); + + describe('builder', () => { + it('should configure command arguments', () => { + const argv = Yargs(process.argv.slice(2)); + const spyPositional = jest.spyOn(argv, 'positional').mockReturnThis(); + const spyOption = jest.spyOn(argv, 'option').mockReturnThis(); + + builder(argv); + + expect(spyPositional).toHaveBeenCalledWith('id', { + type: 'string', + describe: `The ID of a content item to sync. If id is not provided, this command will sync ALL content items through all content repositories in the hub.` + }); + + expect(spyOption).toHaveBeenCalledWith('repoId', { + type: 'string', + describe: 'The ID of a content repository to search items in to be sync.', + requiresArg: false + }); + + expect(spyOption).toHaveBeenCalledWith('folderId', { + type: 'string', + describe: 'The ID of a folder to search items in to be sync.', + requiresArg: false + }); + + expect(spyOption).toHaveBeenCalledWith('facet', { + type: 'string', + describe: + "Publish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." + }); + + expect(spyOption).toHaveBeenCalledWith('f', { + type: 'boolean', + boolean: true, + describe: 'If present, there will be no confirmation prompt before publishing the found content.' + }); + + expect(spyOption).toHaveBeenCalledWith('s', { + type: 'boolean', + boolean: true, + describe: 'If present, no log file will be produced.' + }); + + expect(spyOption).toHaveBeenCalledWith('logFile', { + type: 'string', + default: LOG_FILENAME, + describe: 'Path to a log file to write to.', + coerce: coerceLog + }); + + expect(spyOption).toHaveBeenCalledWith('destinationHubId', { + type: 'string', + describe: 'The ID of a destination hub to sync with.', + requiresArg: true, + demandOption: true + }); + }); + }); + + describe('handler', () => { + const HUB_ID = '67d1c1c7642fa239dbe15164'; + const DEST_HUB_ID = '67d2a201642fa239dbe1523d'; + const globalArgs = { + $0: 'test', + _: ['test'], + json: true, + clientId: 'client-id', + clientSecret: 'client-secret', + hubId: HUB_ID + }; + + const mockLog = { + open: jest.fn().mockReturnValue({ + appendLine: jest.fn(), + addComment: jest.fn(), + close: jest.fn() + }) + } as unknown as FileLog; + + it('should sync content item by id', async () => { + const CONTENT_ITEM_ID = 'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (readline as any).setResponses(['y']); + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); + + mockSync.mockImplementation((destinationHubId, hub, contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); + }); + + mockFailedJobs.mockReturnValue(0); + + await handler({ + ...globalArgs, + id: CONTENT_ITEM_ID, + destinationHubId: DEST_HUB_ID, + logFile: mockLog + }); + + expect(getContentByIds).toHaveBeenCalledWith(expect.any(Object), [CONTENT_ITEM_ID]); + expect(mockSync).toHaveBeenCalledTimes(1); + expect(mockSync).toHaveBeenCalledWith( + DEST_HUB_ID, + expect.any(Hub), + expect.any(ContentItem), + expect.any(Function) + ); + }); + 
it('should sync content items by query', async () => { + const CONTENT_ITEM_ID = 'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + const REPOSITORY_ID = 'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (readline as any).setResponses(['y']); + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContent as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); + + mockSync.mockImplementation((destinationHubId, hub, contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); + }); + + mockFailedJobs.mockReturnValue(0); + + await handler({ + ...globalArgs, + repoId: REPOSITORY_ID, + destinationHubId: DEST_HUB_ID, + logFile: mockLog + }); + + expect(getContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Hub), undefined, { + repoId: REPOSITORY_ID, + folderId: undefined, + enrichItems: true, + status: 'ACTIVE' + }); + expect(mockSync).toHaveBeenCalledTimes(1); + expect(mockSync).toHaveBeenCalledWith( + DEST_HUB_ID, + expect.any(Hub), + expect.any(ContentItem), + expect.any(Function) + ); + }); + }); +}); diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts index 98f5959f..2a2c790b 100644 --- a/src/commands/content-item/sync.ts +++ b/src/commands/content-item/sync.ts @@ -9,7 +9,7 @@ import { getContent } from '../../common/filter/fetch-content'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import { progressBar } from '../../common/progress-bar/progress-bar'; import { ContentItemSyncService } from './sync.service'; -import { getRootContentItems } from '../../common/content-item/get-root-content-items'; +import { getIndependentContentItems } from '../../common/content-item/get-independent-content-items'; import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; export const LOG_FILENAME = (platform: string = process.platform): string => @@ -69,7 +69,7 @@ export const builder = (yargs: Argv): void => { }; export default interface SyncOptions { - id?: string | string[]; + id?: string; repoId?: string | string[]; folderId?: string | string[]; facet?: string; @@ -106,14 +106,14 @@ export const handler = async (argv: Arguments { progress.increment(); - if (syncJob.status === 'FAILED') { - log.addComment(`Failed content item sync job ${syncJob.id}: ${JSON.stringify(syncJob.errors)}`); - return; - } - log.addComment(`Content item synced: ${contentItem.label} (jobId: ${syncJob.id}) ${JSON.stringify(syncJob)}`); + const logComment = + syncJob.status === 'FAILED' + ? 
`Failed content item sync job ${syncJob.id}: ${JSON.stringify(syncJob.errors)}`
+          : `Content item synced: ${contentItem.label} (jobId: ${syncJob.id})`;
+
+      log.addComment(logComment);
+    });
+  });
diff --git a/src/commands/job.spec.ts b/src/commands/job.spec.ts
new file mode 100644
index 00000000..e6484c3e
--- /dev/null
+++ b/src/commands/job.spec.ts
@@ -0,0 +1,12 @@
+import Yargs from 'yargs/yargs';
+import { builder } from './job';
+import YargsCommandBuilderOptions from '../common/yargs/yargs-command-builder-options';
+
+describe('job command', () => {
+  it('should build', () => {
+    const argv = Yargs(process.argv.slice(2));
+    const spyCommandDir = jest.spyOn(argv, 'commandDir').mockReturnValue(argv);
+    builder(argv);
+    expect(spyCommandDir).toHaveBeenCalledWith('job', YargsCommandBuilderOptions);
+  });
+});
diff --git a/src/commands/job/get.spec.ts b/src/commands/job/get.spec.ts
new file mode 100644
index 00000000..f1dca9c7
--- /dev/null
+++ b/src/commands/job/get.spec.ts
@@ -0,0 +1,69 @@
+import Yargs from 'yargs/yargs';
+import { builder, command, handler } from './get';
+import DataPresenter, { RenderingOptions } from '../../view/data-presenter';
+import dynamicContentClientFactory from '../../services/dynamic-content-client-factory';
+import { Hub, Job } from 'dc-management-sdk-js';
+import { singleItemTableOptions } from '../../common/table/table.consts';
+
+jest.mock('../../services/dynamic-content-client-factory');
+jest.mock('../../view/data-presenter');
+
+describe('job get command', () => {
+  it('should have the command defined', function () {
+    expect(command).toEqual('get <id>');
+  });
+
+  describe('builder', () => {
+    it('should configure command arguments', function () {
+      const argv = Yargs(process.argv.slice(2));
+      const spyPositional = jest.spyOn(argv, 'positional').mockReturnThis();
+      const spyOptions = jest.spyOn(argv, 'options').mockReturnThis();
+
+      builder(argv);
+
+      expect(spyPositional).toHaveBeenCalledWith('id', {
+        describe: 'Job ID',
+        type: 'string'
+      });
+      expect(spyOptions).toHaveBeenCalledWith(RenderingOptions);
+    });
+  });
+
+  describe('handler', () => {
+    const yargArgs = {
+      $0: 'test',
+      _: ['test'],
+      json: true
+    };
+    const config = {
+      clientId: 'client-id',
+      clientSecret: 'client-secret',
+      hubId: '67d1c1c7642fa239dbe15164'
+    };
+
+    it('should get a job by id', async () => {
+      const mockDataPresenter = DataPresenter as jest.Mock;
+      const mockGetHub = jest.fn();
+      const mockGetJob = jest.fn();
+      (dynamicContentClientFactory as jest.Mock).mockReturnValue({
+        hubs: {
+          get: mockGetHub.mockResolvedValue({
+            ...new Hub({ id: '67d1c1c7642fa239dbe15164' }),
+            related: { jobs: { get: mockGetJob.mockResolvedValue(new Job({ id: '68e5289f0aba3024bde050f9' })) } }
+          })
+        }
+      });
+      const argv = { ...yargArgs, id: '68e5289f0aba3024bde050f9', ...config };
+
+      await handler(argv);
+
+      expect(mockGetHub).toHaveBeenCalledWith('67d1c1c7642fa239dbe15164');
+      expect(mockGetJob).toHaveBeenCalledWith('68e5289f0aba3024bde050f9');
+      expect(mockDataPresenter).toHaveBeenCalledWith({ id: '68e5289f0aba3024bde050f9' });
+      expect(mockDataPresenter.mock.instances[0].render).toHaveBeenCalledWith({
+        json: argv.json,
+        tableUserConfig: singleItemTableOptions
+      });
+    });
+  });
+});
diff --git a/src/commands/job/get.ts b/src/commands/job/get.ts
index 3baa6e13..8c36d81f 100644
--- a/src/commands/job/get.ts
+++ b/src/commands/job/get.ts
@@ -1,6 +1,5 @@
 import { Arguments, Argv } from 'yargs';
 import DataPresenter, { RenderingArguments, RenderingOptions } from '../../view/data-presenter';
-import { BuilderOptions } from '../content-repository/assign-content-type'; import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { singleItemTableOptions } from '../../common/table/table.consts'; @@ -18,6 +17,10 @@ export const builder = (yargs: Argv): void => { .options(RenderingOptions); }; +export interface BuilderOptions { + id: string; +} + export const handler = async ( argv: Arguments ): Promise => { diff --git a/src/commands/job/list.spec.ts b/src/commands/job/list.spec.ts new file mode 100644 index 00000000..4e2b24d9 --- /dev/null +++ b/src/commands/job/list.spec.ts @@ -0,0 +1,62 @@ +import { command, handler, itemMapFn } from './list'; +import DataPresenter from '../../view/data-presenter'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { Job } from 'dc-management-sdk-js'; +import { DEFAULT_SIZE } from '../../common/dc-management-sdk-js/paginator'; +import MockPage from '../../common/dc-management-sdk-js/mock-page'; + +jest.mock('../../services/dynamic-content-client-factory'); +jest.mock('../../view/data-presenter'); + +describe('job list command', () => { + it('should command should defined', function () { + expect(command).toEqual('list'); + }); + + describe('handler', () => { + const yargArgs = { + $0: 'test', + _: ['test'], + json: true + }; + const config = { + clientId: 'client-id', + clientSecret: 'client-id', + hubId: '67d1c1c7642fa239dbe15164' + }; + it('should list jobs', async () => { + const pagingOptions = { sort: 'createdDate,desc' }; + const listResponse = new MockPage(Job, [ + new Job({ id: '68e5289f0aba3024bde00001' }), + new Job({ id: '68e5289f0aba3024bde00002' }) + ]); + const mockListJobs = jest.fn().mockResolvedValue(listResponse); + const mockGetHub = jest.fn().mockResolvedValue({ + related: { + jobs: { + list: mockListJobs + } + } + }); + const mockDataPresenter = DataPresenter as jest.Mock; + + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub + } + }); + + const argv = { ...yargArgs, ...config, ...pagingOptions }; + await handler(argv); + + expect(mockGetHub).toHaveBeenCalledWith('67d1c1c7642fa239dbe15164'); + expect(mockListJobs).toHaveBeenCalledWith({ size: DEFAULT_SIZE, ...pagingOptions }); + + expect(mockDataPresenter).toHaveBeenCalledWith([ + { id: '68e5289f0aba3024bde00001' }, + { id: '68e5289f0aba3024bde00002' } + ]); + expect(mockDataPresenter.mock.instances[0].render).toHaveBeenCalledWith({ itemMapFn, json: argv.json }); + }); + }); +}); diff --git a/src/commands/job/list.ts b/src/commands/job/list.ts index 9682b6d6..55054319 100644 --- a/src/commands/job/list.ts +++ b/src/commands/job/list.ts @@ -1,18 +1,18 @@ -import { Arguments, Argv } from 'yargs'; +import { Arguments } from 'yargs'; import DataPresenter, { RenderingArguments, RenderingOptions } from '../../view/data-presenter'; -import { BuilderOptions } from '../content-repository/assign-content-type'; import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { extractSortable, PagingParameters } from '../../common/yargs/sorting-options'; import paginator from '../../common/dc-management-sdk-js/paginator'; import { Job } from 'dc-management-sdk-js'; +import { CommandOptions } from '../../interfaces/command-options.interface'; export const command = 'list'; export const desc = 'List jobs'; -export const builder 
= (yargs: Argv): void => { - yargs.options(RenderingOptions); +export const builder: CommandOptions = { + ...RenderingOptions }; export const itemMapFn = ({ id, label, status, jobType, originHubId, destinationHubId }: Job): object => ({ @@ -25,7 +25,7 @@ export const itemMapFn = ({ id, label, status, jobType, originHubId, destination }); export const handler = async ( - argv: Arguments + argv: Arguments ): Promise => { const client = dynamicContentClientFactory(argv); const hub = await client.hubs.get(argv.hubId); diff --git a/src/common/content-item/get-content-items-by-ids.spec.ts b/src/common/content-item/get-content-items-by-ids.spec.ts new file mode 100644 index 00000000..979513c4 --- /dev/null +++ b/src/common/content-item/get-content-items-by-ids.spec.ts @@ -0,0 +1,51 @@ +import { ContentItem, DynamicContent } from 'dc-management-sdk-js'; + +import { getContentByIds } from './get-content-items-by-ids'; + +describe('getContentByIds', () => { + it('should get content items for the ids provided', async () => { + const contentItemA = new ContentItem({ + id: 'c5b659df-680e-4711-bfbe-111111111111', + label: 'Content item A', + body: { + _meta: { + schema: 'http://bigcontent.io/cms/schema/v1/text' + }, + text: 'Content item A text' + } + }); + const contentItemB = new ContentItem({ + id: 'c5b659df-680e-4711-bfbe-222222222222', + label: 'Content item B', + body: { + _meta: { + schema: 'http://bigcontent.io/cms/schema/v1/text' + }, + text: 'Content item B text' + } + }); + const mockClient = { + contentItems: { + get: jest.fn().mockResolvedValueOnce(contentItemA).mockResolvedValueOnce(contentItemB) + } + } as unknown as DynamicContent; + + const ids = ['c5b659df-680e-4711-bfbe-111111111111', 'c5b659df-680e-4711-bfbe-222222222222']; + const result = await getContentByIds(mockClient, ids); + + expect(result).toEqual([contentItemA, contentItemB]); + }); + it('should throw an error if a supplied id is missing', async () => { + const mockClient = { + contentItems: { + get: jest.fn().mockRejectedValue(new Error('Authorization required.')) + } + } as unknown as DynamicContent; + + const ids = ['c5b659df-680e-4711-bfbe-111111111111']; + + await expect(getContentByIds(mockClient, ids)).rejects.toThrowErrorMatchingInlineSnapshot( + `"Missing content item with id c5b659df-680e-4711-bfbe-111111111111: Authorization required."` + ); + }); +}); diff --git a/src/common/content-item/get-content-items-by-ids.ts b/src/common/content-item/get-content-items-by-ids.ts index 248aefdf..35dbdc60 100644 --- a/src/common/content-item/get-content-items-by-ids.ts +++ b/src/common/content-item/get-content-items-by-ids.ts @@ -7,7 +7,7 @@ export const getContentByIds = async (client: DynamicContent, ids: string[]) => try { contentItems.push(await client.contentItems.get(id)); } catch (e) { - throw new Error(`Missing content item with id: ${id}: ${e.message} `); + throw new Error(`Missing content item with id ${id}: ${e.message}`); } } diff --git a/src/common/content-item/get-independent-content-items.spec.ts b/src/common/content-item/get-independent-content-items.spec.ts new file mode 100644 index 00000000..973776d7 --- /dev/null +++ b/src/common/content-item/get-independent-content-items.spec.ts @@ -0,0 +1,73 @@ +import { ContentItem } from 'dc-management-sdk-js'; + +import { getIndependentContentItems } from './get-independent-content-items'; + +describe('getIndependentContentItems', () => { + it('should return the same number of content items when only unique content items supplied', () => { + const contentItemA = new 
ContentItem({ + id: 'c5b659df-680e-4711-bfbe-111111111111', + label: 'Content item A', + body: { + _meta: { + schema: 'http://bigcontent.io/cms/schema/v1/text' + }, + text: 'Content item A text' + } + }); + const contentItemB = new ContentItem({ + id: 'c5b659df-680e-4711-bfbe-222222222222', + label: 'Content item B', + body: { + _meta: { + schema: 'http://bigcontent.io/cms/schema/v1/text' + }, + text: 'Content item B text' + } + }); + + expect(getIndependentContentItems([contentItemA, contentItemB])).toEqual([contentItemA, contentItemB]); + }); + + it('should filter content items if they already existing in another content items immediate graph', () => { + const contentItemA = new ContentItem({ + id: 'c5b659df-680e-4711-bfbe-111111111111', + label: 'Content item A', + body: { + _meta: { + schema: 'http://bigcontent.io/cms/schema/v1/text' + }, + text: 'Content item A text' + } + }); + const contentItemB = new ContentItem({ + id: 'c5b659df-680e-4711-bfbe-222222222222', + label: 'Content item B', + body: { + _meta: { + schema: 'http://bigcontent.io/cms/schema/v1/text' + }, + text: 'Content item B text', + linkedText: { + _meta: { schema: 'http://bigcontent.io/cms/schema/v1/core#/definitions/content-link' }, + contentType: 'http://bigcontent.io/cms/schema/v1/text', + id: 'c5b659df-680e-4711-bfbe-333333333333' + } + } + }); + const contentItemC = new ContentItem({ + id: 'c5b659df-680e-4711-bfbe-333333333333', + label: 'Content item C', + body: { + _meta: { + schema: 'http://bigcontent.io/cms/schema/v1/text' + }, + text: 'Content item C text' + } + }); + + expect(getIndependentContentItems([contentItemA, contentItemB, contentItemC])).toEqual([ + contentItemA, + contentItemB + ]); + }); +}); diff --git a/src/common/content-item/get-root-content-items.ts b/src/common/content-item/get-independent-content-items.ts similarity index 82% rename from src/common/content-item/get-root-content-items.ts rename to src/common/content-item/get-independent-content-items.ts index 707501bb..c9fb5f86 100644 --- a/src/common/content-item/get-root-content-items.ts +++ b/src/common/content-item/get-independent-content-items.ts @@ -2,10 +2,10 @@ import { ContentItem, ContentRepository } from 'dc-management-sdk-js'; import { ContentMapping } from '../content-mapping'; import { ContentDependancyTree } from './content-dependancy-tree'; -export const getRootContentItems = (contentItems: ContentItem[]) => { +export const getIndependentContentItems = (contentItems: ContentItem[]) => { const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); - const rootContentItems = contentTree.all + const independentContentItems = contentTree.all .filter(node => { let isTopLevel = true; @@ -23,5 +23,5 @@ export const getRootContentItems = (contentItems: ContentItem[]) => { }) .map(node => node.owner.content); - return rootContentItems; + return independentContentItems; }; From 41f437e49b0ca5f8262b1b7fd6f39f16f3370b04 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 10 Oct 2025 14:51:04 +0100 Subject: [PATCH 29/43] feat: update create sync request model and clearer dedup function name --- src/commands/content-item/sync.service.ts | 4 ++-- src/commands/content-item/sync.ts | 12 ++++++------ ...nt-items.spec.ts => dedupe-content-items.spec.ts} | 11 ++++------- ...dent-content-items.ts => dedupe-content-items.ts} | 6 +++--- 4 files changed, 15 insertions(+), 18 deletions(-) rename 
src/common/content-item/{get-independent-content-items.spec.ts => dedupe-content-items.spec.ts} (83%) rename src/common/content-item/{get-independent-content-items.ts => dedupe-content-items.ts} (82%) diff --git a/src/commands/content-item/sync.service.ts b/src/commands/content-item/sync.service.ts index 884c83b3..52fc7d4c 100644 --- a/src/commands/content-item/sync.service.ts +++ b/src/commands/content-item/sync.service.ts @@ -1,4 +1,4 @@ -import { ContentItem, Hub, Job } from 'dc-management-sdk-js'; +import { ContentItem, CreateDeepSyncJobRequest, Hub, Job } from 'dc-management-sdk-js'; import { BurstableQueue } from '../../common/burstable-queue/burstable-queue'; import { setTimeout } from 'node:timers/promises'; @@ -15,7 +15,7 @@ export class ContentItemSyncService { sync(destinationHubId: string, hub: Hub, contentItem: ContentItem, action: (job: Job) => void): void { this.queue.add(async () => { const createSyncJob = await hub.related.jobs.createDeepSyncJob( - new Job({ + new CreateDeepSyncJobRequest({ label: `dc-cli content item: ${contentItem.label}`, ignoreSchemaValidation: true, destinationHubId, diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts index 2a2c790b..a3ae5223 100644 --- a/src/commands/content-item/sync.ts +++ b/src/commands/content-item/sync.ts @@ -9,7 +9,7 @@ import { getContent } from '../../common/filter/fetch-content'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import { progressBar } from '../../common/progress-bar/progress-bar'; import { ContentItemSyncService } from './sync.service'; -import { getIndependentContentItems } from '../../common/content-item/get-independent-content-items'; +import { dedupeContentItems } from '../../common/content-item/dedupe-content-items'; import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; export const LOG_FILENAME = (platform: string = process.platform): string => @@ -113,11 +113,11 @@ export const handler = async (argv: Arguments { + dedupedContentItems.forEach(contentItem => { log.addComment(`Requesting content item sync: ${contentItem.label}`); syncService.sync(destinationHubId, hub, contentItem, (syncJob: Job) => { progress.increment(); diff --git a/src/common/content-item/get-independent-content-items.spec.ts b/src/common/content-item/dedupe-content-items.spec.ts similarity index 83% rename from src/common/content-item/get-independent-content-items.spec.ts rename to src/common/content-item/dedupe-content-items.spec.ts index 973776d7..14020bbb 100644 --- a/src/common/content-item/get-independent-content-items.spec.ts +++ b/src/common/content-item/dedupe-content-items.spec.ts @@ -1,8 +1,8 @@ import { ContentItem } from 'dc-management-sdk-js'; -import { getIndependentContentItems } from './get-independent-content-items'; +import { dedupeContentItems } from './dedupe-content-items'; -describe('getIndependentContentItems', () => { +describe('dedupeContentItems', () => { it('should return the same number of content items when only unique content items supplied', () => { const contentItemA = new ContentItem({ id: 'c5b659df-680e-4711-bfbe-111111111111', @@ -25,7 +25,7 @@ describe('getIndependentContentItems', () => { } }); - expect(getIndependentContentItems([contentItemA, contentItemB])).toEqual([contentItemA, contentItemB]); + expect(dedupeContentItems([contentItemA, contentItemB])).toEqual([contentItemA, contentItemB]); }); it('should filter content items if they already existing in another content items immediate graph', () => 
{ @@ -65,9 +65,6 @@ describe('getIndependentContentItems', () => { } }); - expect(getIndependentContentItems([contentItemA, contentItemB, contentItemC])).toEqual([ - contentItemA, - contentItemB - ]); + expect(dedupeContentItems([contentItemA, contentItemB, contentItemC])).toEqual([contentItemA, contentItemB]); }); }); diff --git a/src/common/content-item/get-independent-content-items.ts b/src/common/content-item/dedupe-content-items.ts similarity index 82% rename from src/common/content-item/get-independent-content-items.ts rename to src/common/content-item/dedupe-content-items.ts index c9fb5f86..1988f05e 100644 --- a/src/common/content-item/get-independent-content-items.ts +++ b/src/common/content-item/dedupe-content-items.ts @@ -2,10 +2,10 @@ import { ContentItem, ContentRepository } from 'dc-management-sdk-js'; import { ContentMapping } from '../content-mapping'; import { ContentDependancyTree } from './content-dependancy-tree'; -export const getIndependentContentItems = (contentItems: ContentItem[]) => { +export const dedupeContentItems = (contentItems: ContentItem[]) => { const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); - const independentContentItems = contentTree.all + const dedupedContentItems = contentTree.all .filter(node => { let isTopLevel = true; @@ -23,5 +23,5 @@ export const getIndependentContentItems = (contentItems: ContentItem[]) => { }) .map(node => node.owner.content); - return independentContentItems; + return dedupedContentItems; }; From 6d9fec26dbd8370ab2b099ba5e51b3f08de215da Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Wed, 15 Oct 2025 15:27:37 +0100 Subject: [PATCH 30/43] feat: bump to latest management sdk --- package-lock.json | 24 ++++++++++++------------ package.json | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/package-lock.json b/package-lock.json index 29c3bcfc..4e4de3a6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,7 @@ "bottleneck": "^2.19.5", "chalk": "^2.4.2", "cli-progress": "^3.12.0", - "dc-management-sdk-js": "^3.0.3", + "dc-management-sdk-js": "^3.1.0", "enquirer": "^2.3.6", "fs-extra": "^10.1.0", "graceful-fs": "^4.2.11", @@ -4106,13 +4106,13 @@ } }, "node_modules/axios": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.10.0.tgz", - "integrity": "sha512-/1xYAC4MP/HEG+3duIhFr4ZQXR4sQXOIe+o6sdqzeykGLx6Upp/1p8MHqhINOvGeP7xyNHe7tsiJByc4SSVUxw==", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", + "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, @@ -5194,17 +5194,17 @@ } }, "node_modules/dc-management-sdk-js": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/dc-management-sdk-js/-/dc-management-sdk-js-3.0.3.tgz", - "integrity": "sha512-a8iLEKTHmPbXPeIED1Eipx4U3rQbVgapK7LrFr4rxBmkxWsTUvkk0i3xZcg9P3NCbpqGZ6nl2yFHQGOhRLj2fw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/dc-management-sdk-js/-/dc-management-sdk-js-3.1.0.tgz", + "integrity": "sha512-UP0C0V9b6f2Te8DiJBWmPIOFpSIwjouf7uIGWkbwK0/AF5EghFYeDnF7JYV1z1mpAINntNkjJ+QXIhzuaLo2/A==", "license": "Apache-2.0", "dependencies": { - "axios": "^1.8.4", - "axios-retry": "^4.5.0", - "url-template": 
"^2.0.8" + "axios": "1.12.2", + "axios-retry": "4.5.0", + "url-template": "2.0.8" }, "engines": { - "node": ">=18" + "node": ">=20" } }, "node_modules/debug": { diff --git a/package.json b/package.json index 44b14be1..ce12cc52 100644 --- a/package.json +++ b/package.json @@ -115,7 +115,7 @@ "bottleneck": "^2.19.5", "chalk": "^2.4.2", "cli-progress": "^3.12.0", - "dc-management-sdk-js": "^3.0.3", + "dc-management-sdk-js": "^3.1.0", "enquirer": "^2.3.6", "fs-extra": "^10.1.0", "graceful-fs": "^4.2.11", From 288d29219c5b6044c7fb6e144a5529c28f1ce867 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Mon, 20 Oct 2025 15:12:38 +0100 Subject: [PATCH 31/43] fix: pin specific jest dependency versions --- package-lock.json | 4 ++-- package.json | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index c9ad3b2c..4913f067 100644 --- a/package-lock.json +++ b/package-lock.json @@ -38,7 +38,7 @@ "@types/cli-progress": "3.11.6", "@types/fs-extra": "9.0.13", "@types/graceful-fs": "4.1.9", - "@types/jest": "^30.0.0", + "@types/jest": "30.0.0", "@types/lodash": "4.14.144", "@types/node": "20.17.19", "@types/node-fetch": "2.5.7", @@ -57,7 +57,7 @@ "eslint-plugin-prettier": "5.2.3", "globals": "15.15.0", "husky": "3.0.5", - "jest": "^30.0.5", + "jest": "30.0.5", "nock": "12.0.3", "prettier": "3.5.1", "ts-jest": "29.4.1", diff --git a/package.json b/package.json index 072d1691..437f9340 100644 --- a/package.json +++ b/package.json @@ -82,7 +82,7 @@ "@types/cli-progress": "3.11.6", "@types/fs-extra": "9.0.13", "@types/graceful-fs": "4.1.9", - "@types/jest": "^30.0.0", + "@types/jest": "30.0.0", "@types/lodash": "4.14.144", "@types/node": "20.17.19", "@types/node-fetch": "2.5.7", @@ -101,7 +101,7 @@ "eslint-plugin-prettier": "5.2.3", "globals": "15.15.0", "husky": "3.0.5", - "jest": "^30.0.5", + "jest": "30.0.5", "nock": "12.0.3", "prettier": "3.5.1", "ts-jest": "29.4.1", From d347c4b6139d6fc6ad332613944f6bfe44e70217 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 24 Oct 2025 14:41:51 +0100 Subject: [PATCH 32/43] fix: change missing content item behaviour to match similar commands --- src/commands/content-item/sync.ts | 17 +++++++++++++---- .../get-content-items-by-ids.spec.ts | 8 ++++---- .../content-item/get-content-items-by-ids.ts | 4 ++-- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts index a3ae5223..223282da 100644 --- a/src/commands/content-item/sync.ts +++ b/src/commands/content-item/sync.ts @@ -105,9 +105,16 @@ export const handler = async (argv: Arguments 0 + ? 
await getContentByIds(client, ids) + : await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); if (!contentItems.length) { console.log('Nothing found to sync, aborting'); @@ -120,8 +127,10 @@ export const handler = async (argv: Arguments { expect(result).toEqual([contentItemA, contentItemB]); }); - it('should throw an error if a supplied id is missing', async () => { + it('should ignore error if a supplied id is missing', async () => { const mockClient = { contentItems: { get: jest.fn().mockRejectedValue(new Error('Authorization required.')) @@ -44,8 +44,8 @@ describe('getContentByIds', () => { const ids = ['c5b659df-680e-4711-bfbe-111111111111']; - await expect(getContentByIds(mockClient, ids)).rejects.toThrowErrorMatchingInlineSnapshot( - `"Missing content item with id c5b659df-680e-4711-bfbe-111111111111: Authorization required."` - ); + const result = await getContentByIds(mockClient, ids); + + expect(result).toEqual([]); }); }); diff --git a/src/common/content-item/get-content-items-by-ids.ts b/src/common/content-item/get-content-items-by-ids.ts index 35dbdc60..565a28d5 100644 --- a/src/common/content-item/get-content-items-by-ids.ts +++ b/src/common/content-item/get-content-items-by-ids.ts @@ -1,13 +1,13 @@ import { ContentItem, DynamicContent } from 'dc-management-sdk-js'; -export const getContentByIds = async (client: DynamicContent, ids: string[]) => { +export const getContentByIds = async (client: DynamicContent, ids: string[]): Promise => { const contentItems: ContentItem[] = []; for (const id of ids) { try { contentItems.push(await client.contentItems.get(id)); } catch (e) { - throw new Error(`Missing content item with id ${id}: ${e.message}`); + // Silently fail missing content items } } From f60e30cefbe573006085119a98660a09751078f3 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Tue, 28 Oct 2025 09:12:26 +0000 Subject: [PATCH 33/43] fix: only display missing content message when id arg used --- src/commands/content-item/sync.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts index 223282da..22557e51 100644 --- a/src/commands/content-item/sync.ts +++ b/src/commands/content-item/sync.ts @@ -127,7 +127,7 @@ export const handler = async (argv: Arguments 0 ? 
Boolean(ids.length !== contentItems.length) : false; if (!force) { const yes = await confirmAllContent('sync', 'content items', allContent, missingContentItems); From 66212ca5a07be5b04eaf922cd0d7dccd3b5f3380 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Tue, 28 Oct 2025 14:58:51 +0000 Subject: [PATCH 34/43] feat: use common content item fetch logic for publish command --- src/commands/content-item/publish.spec.ts | 339 ++++++++++++---------- src/commands/content-item/publish.ts | 172 +++-------- 2 files changed, 233 insertions(+), 278 deletions(-) diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index d24ec78f..6fd457b4 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -1,31 +1,40 @@ -import { builder, handler, getContentItems, processItems, LOG_FILENAME, coerceLog } from './publish'; -import { Status, ContentItem, DynamicContent, Hub, PublishingJob } from 'dc-management-sdk-js'; +import { builder, handler, LOG_FILENAME, coerceLog } from './publish'; +import { ContentItem, Hub, PublishingJob, Job } from 'dc-management-sdk-js'; import { FileLog } from '../../common/file-log'; -import { Arguments } from 'yargs'; -import { ConfigurationParameters } from '../configure'; -import PublishOptions from '../../common/publish/publish-options'; import Yargs from 'yargs/yargs'; -import readline from 'readline'; import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; +import { getContent } from '../../common/filter/fetch-content'; +import * as confirmAllContentModule from '../../common/content-item/confirm-all-content'; +import * as questionHelpers from '../../common/question-helpers'; const mockPublish = jest.fn().mockImplementation((contentItems, fn) => { - fn(contentItems); + fn(contentItems, new PublishingJob({ state: PublishingJobStatus.CREATED })); }); const mockCheck = jest.fn().mockImplementation((publishingJob, fn) => { fn(new PublishingJob({ state: PublishingJobStatus.COMPLETED })); }); +const mockPublishOnIdle = jest.fn().mockImplementation(() => Promise.resolve()); +const mockCheckOnIdle = jest.fn().mockImplementation(() => Promise.resolve()); + +const confirmAllContentSpy = jest.spyOn(confirmAllContentModule, 'confirmAllContent'); +const asyncQuestionSpy = jest.spyOn(questionHelpers, 'asyncQuestion'); jest.mock('../../services/dynamic-content-client-factory'); -jest.mock('../../common/content-item/confirm-all-content'); jest.mock('../../common/log-helpers'); jest.mock('../../common/filter/fetch-content'); -jest.mock('readline'); +jest.mock('../../common/content-item/get-content-items-by-ids', () => { + return { + getContentByIds: jest.fn() + }; +}); jest.mock('../../common/publishing/content-item-publishing-service', () => { return { ContentItemPublishingService: jest.fn().mockImplementation(() => { return { publish: mockPublish, - onIdle: jest.fn() + onIdle: mockPublishOnIdle }; }) }; @@ -35,29 +44,12 @@ jest.mock('../../common/publishing/content-item-publishing-job-service', () => { ContentItemPublishingJobService: jest.fn().mockImplementation(() => { return { check: mockCheck, - onIdle: jest.fn() + onIdle: mockCheckOnIdle }; }) }; }); -const mockClient = { - contentItems: { - get: jest.fn() - }, - hubs: { - get: jest.fn() - } -} as unknown as DynamicContent; - 
-const mockLog = { - open: jest.fn().mockReturnValue({ - appendLine: jest.fn(), - addComment: jest.fn(), - close: jest.fn() - }) -} as unknown as FileLog; - describe('publish tests', () => { describe('builder tests', () => { it('should configure yargs', function () { @@ -112,113 +104,93 @@ describe('publish tests', () => { }); }); - describe('getContentItems tests', () => { - beforeEach(() => jest.clearAllMocks()); - - it('should return content items by id', async () => { - const mockItem = { id: '1', status: Status.ACTIVE } as ContentItem; - mockClient.contentItems.get = jest.fn().mockResolvedValue(mockItem); - - const result = await getContentItems({ - client: mockClient, - id: '1', - hubId: 'hub-id' - }); - - expect(result.contentItems).toEqual([mockItem]); - expect(result.missingContent).toBe(false); - }); - - it('should filter out non-active content items', async () => { - mockClient.contentItems.get = jest - .fn() - .mockResolvedValueOnce({ id: '1', status: Status.ARCHIVED }) - .mockResolvedValueOnce({ id: '2', status: Status.ACTIVE }); - - const result = await getContentItems({ - client: mockClient, - id: ['1', '2'], - hubId: 'hub-id' - }); - - expect(result.contentItems).toHaveLength(1); - expect(result.contentItems[0].id).toBe('2'); - expect(result.missingContent).toBe(true); - }); - - it('should return content using fallback filters', async () => { - const mockHub = {} as Hub; - const contentItems = [{ id: 'a', status: Status.ACTIVE }] as ContentItem[]; - const getContent = require('../../common/filter/fetch-content').getContent; - mockClient.hubs.get = jest.fn().mockResolvedValue(mockHub); - getContent.mockResolvedValue(contentItems); - - const result = await getContentItems({ - client: mockClient, - hubId: 'hub-id', - facet: 'label:test' - }); - - expect(result.contentItems).toEqual(contentItems); - }); - }); + describe('handler', () => { + const HUB_ID = '67d1c1c7642fa239dbe15164'; + const CONTENT_ITEM_ID = 'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + const globalArgs = { + $0: 'test', + _: ['test'], + json: true, + clientId: 'client-id', + clientSecret: 'client-secret', + hubId: HUB_ID + }; + + const mockLog = { + open: jest.fn().mockReturnValue({ + appendLine: jest.fn(), + addComment: jest.fn(), + close: jest.fn() + }) + } as unknown as FileLog; - describe('processItems tests', () => { beforeEach(() => { jest.clearAllMocks(); - jest.mock('readline'); + confirmAllContentSpy.mockResolvedValue(true); + asyncQuestionSpy.mockResolvedValue(true); }); - it('should exit early if no content items', async () => { - console.log = jest.fn(); - - await processItems({ - contentItems: [], - logFile: mockLog, - allContent: false, - missingContent: false, - client: mockClient + it('should publish content item by id', async () => { + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); - expect(console.log).toHaveBeenCalledWith('Nothing found to publish, aborting.'); - }); + mockPublish.mockImplementation((contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); + }); - it('should confirm before publishing when force is false', async () => { - const confirmAllContent = require('../../common/content-item/confirm-all-content').confirmAllContent; - confirmAllContent.mockResolvedValue(false); - console.log = 
jest.fn(); - - await processItems({ - contentItems: [new ContentItem({ id: '1', label: 'Test', body: { _meta: {} } })], - force: false, - silent: true, - logFile: mockLog, - allContent: false, - missingContent: false, - client: mockClient + await handler({ + ...globalArgs, + id: CONTENT_ITEM_ID, + logFile: mockLog }); - expect(confirmAllContent).toHaveBeenCalled(); + expect(getContentByIds).toHaveBeenCalledWith(expect.any(Object), [CONTENT_ITEM_ID]); + expect(mockPublish).toHaveBeenCalledTimes(1); + expect(mockPublish).toHaveBeenCalledWith(expect.any(ContentItem), expect.any(Function)); + expect(mockPublishOnIdle).toHaveBeenCalledTimes(1); + expect(mockCheck).toHaveBeenCalledTimes(1); + expect(mockCheckOnIdle).toHaveBeenCalledTimes(1); }); + it('should publish content items by query', async () => { + const REPOSITORY_ID = '67d1c1cf642fa239dbe15165'; + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContent as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); - it('should process all items and call publish', async () => { - const contentItem = new ContentItem({ id: '1', label: 'Publish Me', body: { _meta: {} } }); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (readline as any).setResponses(['Y']); + mockPublish.mockImplementation((contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); + }); - await processItems({ - contentItems: [contentItem], - force: true, - silent: true, - logFile: mockLog, - allContent: false, - missingContent: false, - client: mockClient + await handler({ + ...globalArgs, + repoId: REPOSITORY_ID, + logFile: mockLog }); + expect(getContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Hub), undefined, { + enrichItems: true, + folderId: undefined, + repoId: REPOSITORY_ID, + status: 'ACTIVE' + }); expect(mockPublish).toHaveBeenCalledTimes(1); + expect(mockPublish).toHaveBeenCalledWith(expect.any(ContentItem), expect.any(Function)); + expect(mockPublishOnIdle).toHaveBeenCalledTimes(1); expect(mockCheck).toHaveBeenCalledTimes(1); + expect(mockCheckOnIdle).toHaveBeenCalledTimes(1); }); it('should process all items while filtering out any dependencies and call publish', async () => { @@ -239,55 +211,116 @@ describe('publish tests', () => { label: 'No need to publish me', body: { _meta: {} } }); + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([contentItemWithDependency, contentItemDependency]); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (readline as any).setResponses(['Y']); - - await processItems({ - contentItems: [contentItemWithDependency, contentItemDependency], - force: true, - silent: true, - logFile: mockLog, - allContent: false, - missingContent: false, - client: mockClient + mockPublish.mockImplementation((contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); + }); + + await handler({ + ...globalArgs, + id: [contentItemWithDependency.id, contentItemDependency.id], + logFile: mockLog }); expect(mockPublish).toHaveBeenCalledTimes(1); + expect(mockPublish).toHaveBeenCalledWith(contentItemWithDependency, expect.any(Function)); + 
expect(mockPublishOnIdle).toHaveBeenCalledTimes(1); expect(mockCheck).toHaveBeenCalledTimes(1); + expect(mockCheckOnIdle).toHaveBeenCalledTimes(1); }); - }); - describe('handler tests', () => { - const clientFactory = require('../../services/dynamic-content-client-factory').default; - const getItemsSpy = jest.spyOn(require('./publish'), 'getContentItems'); - const processSpy = jest.spyOn(require('./publish'), 'processItems'); - beforeEach(() => { - jest.clearAllMocks(); - clientFactory.mockReturnValue(mockClient); - getItemsSpy.mockResolvedValue({ - contentItems: [{ id: '123', label: 'Test', status: Status.ACTIVE }], - missingContent: false + it('should exit before processing content items if confirmation to proceed is rejected', async () => { + confirmAllContentSpy.mockResolvedValue(false); + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); + + await handler({ + ...globalArgs, + id: CONTENT_ITEM_ID, + logFile: mockLog + }); + expect(mockPublish).toHaveBeenCalledTimes(0); + expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); + expect(mockCheck).toHaveBeenCalledTimes(0); + expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); + }); + + it('should not check publishing jobs if check question is rejected', async () => { + asyncQuestionSpy.mockResolvedValue(false); + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } }); - processSpy.mockResolvedValue(undefined); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: 'CONTENT_ITEM_ID_ZZZZZZZZZ', body: { _meta: {} } }) + ]); + + mockPublish.mockImplementation((contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); + }); + + await handler({ + ...globalArgs, + id: 'CONTENT_ITEM_ID_ZZZZZZZZZ', + logFile: mockLog + }); + + expect(mockPublish).toHaveBeenCalledTimes(1); + expect(mockPublishOnIdle).toHaveBeenCalledTimes(1); + expect(mockCheck).toHaveBeenCalledTimes(0); + expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); }); - it('should warn when both id and facet are provided', async () => { - console.log = jest.fn(); + + it('should exit early if ID or query args are not passed', async () => { + const logSpy = jest.spyOn(console, 'log'); await handler({ - id: '1', - facet: 'label:test', - hubId: 'hub-id', + ...globalArgs, + id: CONTENT_ITEM_ID, + facet: 'mock-facet', logFile: mockLog - } as Arguments); - expect(console.log).toHaveBeenCalledWith('Please specify either a facet or an ID - not both.'); + }); + expect(logSpy).toHaveBeenCalledWith('Please specify either a facet or an ID - not both'); + expect(mockPublish).toHaveBeenCalledTimes(0); + expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); + expect(mockCheck).toHaveBeenCalledTimes(0); + expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); }); - it('should process items with valid inputs', async () => { + + it('should exit early if no content items', async () => { + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([]); + const logSpy = jest.spyOn(console, 'log'); await handler({ - 
hubId: 'hub-id', + ...globalArgs, + id: CONTENT_ITEM_ID, logFile: mockLog - } as Arguments); - expect(getItemsSpy).toHaveBeenCalled(); - expect(processSpy).toHaveBeenCalled(); + }); + expect(logSpy).toHaveBeenCalledWith('Nothing found to publish, aborting'); + expect(mockPublish).toHaveBeenCalledTimes(0); + expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); + expect(mockCheck).toHaveBeenCalledTimes(0); + expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); }); }); }); diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index 526b7386..290b6918 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -4,18 +4,18 @@ import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import PublishOptions from '../../common/publish/publish-options'; -import { ContentItem, ContentRepository, DynamicContent, PublishingJob, Status } from 'dc-management-sdk-js'; +import { ContentItem, DynamicContent, PublishingJob, Status } from 'dc-management-sdk-js'; import { getDefaultLogPath, createLog } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import { withOldFilters } from '../../common/filter/facet'; import { getContent } from '../../common/filter/fetch-content'; import { asyncQuestion } from '../../common/question-helpers'; -import { ContentDependancyTree } from '../../common/content-item/content-dependancy-tree'; -import { ContentMapping } from '../../common/content-mapping'; import { ContentItemPublishingService } from '../../common/publishing/content-item-publishing-service'; import { ContentItemPublishingJobService } from '../../common/publishing/content-item-publishing-job-service'; import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; import { progressBar } from '../../common/progress-bar/progress-bar'; +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; +import { dedupeContentItems } from '../../common/content-item/dedupe-content-items'; export const command = 'publish [id]'; @@ -72,122 +72,27 @@ export const builder = (yargs: Argv): void => { }); }; -export const getContentItems = async ({ - client, - id, - hubId, - repoId, - folderId, - facet -}: { - client: DynamicContent; - id?: string | string[]; - hubId: string; - repoId?: string | string[]; - folderId?: string | string[]; - facet?: string; -}): Promise<{ contentItems: ContentItem[]; missingContent: boolean }> => { - try { - let contentItems: ContentItem[] = []; - - if (id != null) { - const itemIds = Array.isArray(id) ? id : [id]; - const items: ContentItem[] = []; - - for (const id of itemIds) { - try { - items.push(await client.contentItems.get(id)); - } catch { - // Missing item. 
- } - } - - contentItems.push(...items.filter(item => item.status === Status.ACTIVE)); - - return { - contentItems, - missingContent: contentItems.length != itemIds.length - }; - } - - const hub = await client.hubs.get(hubId); - - contentItems = await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); - - return { contentItems, missingContent: false }; - } catch (err) { - console.log(err); - - return { - contentItems: [], - missingContent: false - }; - } -}; - -export const processItems = async ({ +const processItems = async ({ client, contentItems, force, - silent, - logFile, - allContent, - missingContent + log }: { client: DynamicContent; contentItems: ContentItem[]; force?: boolean; - silent?: boolean; - logFile: FileLog; - allContent: boolean; - missingContent: boolean; + log: FileLog; }): Promise => { - if (contentItems.length == 0) { - console.log('Nothing found to publish, aborting.'); - return; - } - - const repoContentItems = contentItems.map(content => ({ repo: new ContentRepository(), content })); - const contentTree = new ContentDependancyTree(repoContentItems, new ContentMapping()); - let childCount = 0; - const rootContentItems = contentTree.all - .filter(node => { - let isTopLevel = true; - - contentTree.traverseDependants( - node, - dependant => { - if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) { - isTopLevel = false; - childCount++; - } - }, - true - ); - - return isTopLevel; - }) - .map(node => node.owner.content); + const dedupedContentItems = dedupeContentItems(contentItems); - const log = logFile.open(); log.appendLine( - `Found ${rootContentItems.length} item(s) to publish (ignoring ${childCount} duplicate child item(s)).` + `Publishing ${dedupedContentItems.length} item(s) (ignoring ${contentItems.length - dedupedContentItems.length} duplicate child item(s))` ); - if (!force) { - const yes = await confirmAllContent('publish', 'content items', allContent, missingContent); - if (!yes) { - return; - } - } - - log.appendLine(`Publishing ${rootContentItems.length} item(s).`); - const publishingService = new ContentItemPublishingService(); const contentItemPublishJobs: [ContentItem, PublishingJob][] = []; - const publishProgress = progressBar(rootContentItems.length, 0, { title: 'Publishing content items' }); - - for (const item of rootContentItems) { + const publishProgress = progressBar(dedupedContentItems.length, 0, { title: 'Publishing content items' }); + for (const item of dedupedContentItems) { try { await publishingService.publish(item, (contentItem, publishingJob) => { contentItemPublishJobs.push([contentItem, publishingJob]); @@ -229,14 +134,11 @@ export const processItems = async ({ await publishingJobService.onIdle(); checkPublishProgress.stop(); } - - log.appendLine(`Publishing complete`); - - await log.close(!silent); }; export const handler = async (argv: Arguments): Promise => { const { id, logFile, force, silent, hubId, repoId, folderId } = argv; + const log = logFile.open(); const client = dynamicContentClientFactory(argv); const facet = withOldFilters(argv.facet, argv); @@ -244,38 +146,58 @@ export const handler = async (argv: Arguments 0 + ? await getContentByIds(client, ids) + : await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); + + if (!contentItems.length) { + log.appendLine('Nothing found to publish, aborting'); + return; + } + + const missingContentItems = ids.length > 0 ? 
Boolean(ids.length !== contentItems.length) : false; + + log.appendLine(`Found ${contentItems.length} content items to publish (including duplicate child items)\n`); + + if (!force) { + const yes = await confirmAllContent('publish', 'content items', allContent, missingContentItems); + if (!yes) { + return; + } + } await processItems({ client, contentItems, force, - silent, - logFile, - allContent, - missingContent + log }); + + log.appendLine(`Publishing complete`); + + await log.close(!silent); }; From 6578b04fd9a8809d688274183b5ca69fe4239cc3 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Tue, 28 Oct 2025 15:04:45 +0000 Subject: [PATCH 35/43] fix: make sure sync command exits early when no content found --- src/commands/content-item/sync.ts | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts index 22557e51..94d66b74 100644 --- a/src/commands/content-item/sync.ts +++ b/src/commands/content-item/sync.ts @@ -81,26 +81,27 @@ export default interface SyncOptions { export const handler = async (argv: Arguments): Promise => { const { id, logFile, force, silent, hubId, repoId, folderId, destinationHubId } = argv; + const log = logFile.open(); const client = dynamicContentClientFactory(argv); const facet = withOldFilters(argv.facet, argv); if (repoId && id) { - console.log('ID of content item is specified, ignoring repository ID'); + log.appendLine('ID of content item is specified, ignoring repository ID'); } if (id && facet) { - console.log('Please specify either a facet or an ID - not both'); + log.appendLine('Please specify either a facet or an ID - not both'); return; } if (repoId && folderId) { - console.log('Folder is specified, ignoring repository ID'); + log.appendLine('Folder is specified, ignoring repository ID'); } const allContent = !id && !facet && !folderId && !repoId; if (allContent) { - console.log('No filter was given, syncing all content'); + log.appendLine('No filter was given, syncing all content'); } const hub = await client.hubs.get(hubId); @@ -117,11 +118,11 @@ export const handler = async (argv: Arguments Date: Tue, 28 Oct 2025 17:47:01 +0000 Subject: [PATCH 36/43] feat: update unpublish command to use shared content item fetch --- src/commands/content-item/publish.spec.ts | 9 +- src/commands/content-item/publish.ts | 1 - src/commands/content-item/unpublish.spec.ts | 324 +++++++++++--------- src/commands/content-item/unpublish.ts | 166 +++------- 4 files changed, 226 insertions(+), 274 deletions(-) diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index 6fd457b4..70df92e2 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -116,9 +116,10 @@ describe('publish tests', () => { hubId: HUB_ID }; + const mockAppendLine = jest.fn(); const mockLog = { open: jest.fn().mockReturnValue({ - appendLine: jest.fn(), + appendLine: mockAppendLine, addComment: jest.fn(), close: jest.fn() }) @@ -288,14 +289,13 @@ describe('publish tests', () => { }); it('should exit early if ID or query args are not passed', async () => { - const logSpy = jest.spyOn(console, 'log'); await handler({ ...globalArgs, id: CONTENT_ITEM_ID, facet: 'mock-facet', logFile: mockLog }); - expect(logSpy).toHaveBeenCalledWith('Please specify either a facet or an ID - not both'); + expect(mockAppendLine).toHaveBeenCalledWith('Please specify either a facet or an ID - not both'); expect(mockPublish).toHaveBeenCalledTimes(0); 
expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); expect(mockCheck).toHaveBeenCalledTimes(0); @@ -310,13 +310,12 @@ describe('publish tests', () => { } }); (getContentByIds as unknown as jest.Mock).mockResolvedValue([]); - const logSpy = jest.spyOn(console, 'log'); await handler({ ...globalArgs, id: CONTENT_ITEM_ID, logFile: mockLog }); - expect(logSpy).toHaveBeenCalledWith('Nothing found to publish, aborting'); + expect(mockAppendLine).toHaveBeenCalledWith('Nothing found to publish, aborting'); expect(mockPublish).toHaveBeenCalledTimes(0); expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); expect(mockCheck).toHaveBeenCalledTimes(0); diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index 290b6918..78ab1738 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -1,4 +1,3 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ import { Arguments, Argv } from 'yargs'; import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; diff --git a/src/commands/content-item/unpublish.spec.ts b/src/commands/content-item/unpublish.spec.ts index b5189a7f..dfcc4fd0 100644 --- a/src/commands/content-item/unpublish.spec.ts +++ b/src/commands/content-item/unpublish.spec.ts @@ -1,48 +1,41 @@ -import { builder, handler, getContentItems, processItems, LOG_FILENAME, coerceLog } from './unpublish'; -import { Status, ContentItem, DynamicContent, Hub } from 'dc-management-sdk-js'; +import { builder, handler, LOG_FILENAME, coerceLog } from './unpublish'; +import { ContentItem, Hub, PublishingJob, Job, ContentItemPublishingStatus } from 'dc-management-sdk-js'; import { FileLog } from '../../common/file-log'; -import { Arguments } from 'yargs'; -import { ConfigurationParameters } from '../configure'; -import PublishOptions from '../../common/publish/publish-options'; import Yargs from 'yargs/yargs'; +import { PublishingJobStatus } from 'dc-management-sdk-js/build/main/lib/model/PublishingJobStatus'; +import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; +import { getContent } from '../../common/filter/fetch-content'; +import * as confirmAllContentModule from '../../common/content-item/confirm-all-content'; +import * as questionHelpers from '../../common/question-helpers'; const mockUnpublish = jest.fn().mockImplementation((contentItems, fn) => { - fn(contentItems); + fn(contentItems, new PublishingJob({ state: PublishingJobStatus.CREATED })); }); +const mockUnpublishOnIdle = jest.fn().mockImplementation(() => Promise.resolve()); + +const confirmAllContentSpy = jest.spyOn(confirmAllContentModule, 'confirmAllContent'); +const asyncQuestionSpy = jest.spyOn(questionHelpers, 'asyncQuestion'); jest.mock('../../services/dynamic-content-client-factory'); -jest.mock('../../common/content-item/confirm-all-content'); jest.mock('../../common/log-helpers'); jest.mock('../../common/filter/fetch-content'); -jest.mock('readline'); +jest.mock('../../common/content-item/get-content-items-by-ids', () => { + return { + getContentByIds: jest.fn() + }; +}); jest.mock('../../common/publishing/content-item-unpublishing-service', () => { return { ContentItemUnpublishingService: jest.fn().mockImplementation(() => { return { unpublish: mockUnpublish, - onIdle: jest.fn() + onIdle: mockUnpublishOnIdle }; }) }; }); -const mockClient = { - contentItems: { - 
get: jest.fn() - }, - hubs: { - get: jest.fn() - } -} as unknown as DynamicContent; - -const mockLog = { - open: jest.fn().mockReturnValue({ - appendLine: jest.fn(), - addComment: jest.fn(), - close: jest.fn() - }) -} as unknown as FileLog; - describe('unpublish tests', () => { describe('builder tests', () => { it('should configure yargs', function () { @@ -97,171 +90,196 @@ describe('unpublish tests', () => { }); }); - describe('getContentItems tests', () => { - beforeEach(() => jest.clearAllMocks()); - - it('should return content items by id', async () => { - const mockItem = { id: '1', status: Status.ACTIVE } as ContentItem; - mockClient.contentItems.get = jest.fn().mockResolvedValue(mockItem); - - const result = await getContentItems({ - client: mockClient, - id: '1', - hubId: 'hub-id' - }); - - expect(result.contentItems).toEqual([mockItem]); - expect(result.missingContent).toBe(false); - }); - - it('should filter out non-active content items', async () => { - mockClient.contentItems.get = jest - .fn() - .mockResolvedValueOnce({ id: '1', status: Status.ARCHIVED }) - .mockResolvedValueOnce({ id: '2', status: Status.ACTIVE }); - - const result = await getContentItems({ - client: mockClient, - id: ['1', '2'], - hubId: 'hub-id' - }); - - expect(result.contentItems).toHaveLength(1); - expect(result.contentItems[0].id).toBe('2'); - expect(result.missingContent).toBe(true); - }); - - it('should return content using fallback filters', async () => { - const mockHub = {} as Hub; - const contentItems = [{ id: 'a', status: Status.ACTIVE }] as ContentItem[]; - const getContent = require('../../common/filter/fetch-content').getContent; - mockClient.hubs.get = jest.fn().mockResolvedValue(mockHub); - getContent.mockResolvedValue(contentItems); + describe('handler', () => { + const HUB_ID = '67d1c1c7642fa239dbe15164'; + const CONTENT_ITEM_ID = 'c5b659df-680e-4711-bfbe-84eaa10d76cc'; + const globalArgs = { + $0: 'test', + _: ['test'], + json: true, + clientId: 'client-id', + clientSecret: 'client-secret', + hubId: HUB_ID + }; + const mockAppendLine = jest.fn(); + const mockLog = { + open: jest.fn().mockReturnValue({ + appendLine: mockAppendLine, + addComment: jest.fn(), + close: jest.fn() + }) + } as unknown as FileLog; - const result = await getContentItems({ - client: mockClient, - hubId: 'hub-id', - facet: 'label:test' - }); - - expect(result.contentItems).toEqual(contentItems); - }); - }); - - describe('processItems tests', () => { beforeEach(() => { jest.clearAllMocks(); - jest.mock('readline'); + confirmAllContentSpy.mockResolvedValue(true); + asyncQuestionSpy.mockResolvedValue(true); }); - it('should exit early if no content items', async () => { - console.log = jest.fn(); - - await processItems({ - contentItems: [], - logFile: mockLog, - allContent: false, - missingContent: false + it('should publish content item by id', async () => { + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); - expect(console.log).toHaveBeenCalledWith('Nothing found to unpublish, aborting.'); - }); - - it('should confirm before unpublishing when force is false', async () => { - const confirmAllContent = require('../../common/content-item/confirm-all-content').confirmAllContent; - confirmAllContent.mockResolvedValue(false); - console.log = jest.fn(); + 
mockUnpublish.mockImplementation((contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); + }); - await processItems({ - contentItems: [new ContentItem({ id: '1', label: 'Test', body: { _meta: {} } })], - force: false, - silent: true, - logFile: mockLog, - allContent: false, - missingContent: false + await handler({ + ...globalArgs, + id: CONTENT_ITEM_ID, + logFile: mockLog }); - expect(confirmAllContent).toHaveBeenCalled(); + expect(getContentByIds).toHaveBeenCalledWith(expect.any(Object), [CONTENT_ITEM_ID]); + expect(mockUnpublish).toHaveBeenCalledTimes(1); + expect(mockUnpublish).toHaveBeenCalledWith(expect.any(ContentItem), expect.any(Function)); + expect(mockUnpublishOnIdle).toHaveBeenCalledTimes(1); }); + it('should publish content items by query', async () => { + const REPOSITORY_ID = '67d1c1cf642fa239dbe15165'; + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContent as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); - it('should process all items and call unpublish', async () => { - const contentItem = new ContentItem({ - id: '1', - label: 'Unpublish Me', - body: { _meta: {} } + mockUnpublish.mockImplementation((contentItem, fn) => { + fn(new Job({ id: '68e5289f0aba3024bde050f9', status: 'COMPLETE' })); }); - await processItems({ - contentItems: [contentItem], - force: true, - silent: true, - logFile: mockLog, - allContent: false, - missingContent: false + await handler({ + ...globalArgs, + repoId: REPOSITORY_ID, + logFile: mockLog }); + expect(getContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Hub), undefined, { + enrichItems: true, + folderId: undefined, + repoId: REPOSITORY_ID, + status: 'ACTIVE' + }); expect(mockUnpublish).toHaveBeenCalledTimes(1); + expect(mockUnpublish).toHaveBeenCalledWith(expect.any(ContentItem), expect.any(Function)); + expect(mockUnpublishOnIdle).toHaveBeenCalledTimes(1); }); - it('should process all items while filtering out any dependencies and call unpublish', async () => { - const contentItemDependency = new ContentItem({ + it('should process only process content items with an unpublishable status', async () => { + const publishedContentItem = new ContentItem({ + id: 'da2ee918-34c3-4fc1-ae05-111111111111', + label: 'Published - unpublish me', + publishingStatus: ContentItemPublishingStatus.LATEST, + body: { + _meta: {}, + text: 'text 1' + } + }); + const unpublishedContentItemDependency = new ContentItem({ id: 'da2ee918-34c3-4fc1-ae05-222222222222', - label: 'No need to unpublish me', - body: { _meta: {} } + label: 'Already unpublished - ignore me', + publishingStatus: ContentItemPublishingStatus.UNPUBLISHED, + body: { + _meta: {}, + text: 'text 1' + } }); - - const contentItemWithDependency = new ContentItem({ - id: 'da2ee918-34c3-4fc1-ae05-111111111111', - label: 'Unpublish me', + const notPublishedContentItemDependency = new ContentItem({ + id: 'da2ee918-34c3-4fc1-ae05-333333333333', + label: 'Never been published - ignore me', + publishingStatus: ContentItemPublishingStatus.NONE, body: { _meta: {}, - dependency: contentItemDependency + text: 'text 1' } }); - await processItems({ - contentItems: [contentItemWithDependency], - force: true, - silent: true, - logFile: mockLog, - allContent: false, - missingContent: false + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as 
jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([ + publishedContentItem, + unpublishedContentItemDependency, + notPublishedContentItemDependency + ]); + + mockUnpublish.mockImplementation((contentItem, fn) => { + fn(new ContentItem({ id: '68e5289f0aba3024bde050f9' })); + }); + + await handler({ + ...globalArgs, + id: [publishedContentItem.id, unpublishedContentItemDependency.id, notPublishedContentItemDependency.id], + logFile: mockLog }); expect(mockUnpublish).toHaveBeenCalledTimes(1); + expect(mockUnpublish).toHaveBeenCalledWith(publishedContentItem, expect.any(Function)); + expect(mockUnpublishOnIdle).toHaveBeenCalledTimes(1); }); - }); - describe('handler tests', () => { - const clientFactory = require('../../services/dynamic-content-client-factory').default; - const getItemsSpy = jest.spyOn(require('./unpublish'), 'getContentItems'); - const processSpy = jest.spyOn(require('./unpublish'), 'processItems'); - beforeEach(() => { - jest.clearAllMocks(); - clientFactory.mockReturnValue(mockClient); - getItemsSpy.mockResolvedValue({ - contentItems: [{ id: '123', label: 'Test', status: Status.ACTIVE }], - missingContent: false + it('should exit before processing content items if confirmation to proceed is rejected', async () => { + confirmAllContentSpy.mockResolvedValue(false); + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([ + new ContentItem({ id: CONTENT_ITEM_ID, body: { _meta: {} } }) + ]); + + await handler({ + ...globalArgs, + id: CONTENT_ITEM_ID, + logFile: mockLog }); - processSpy.mockResolvedValue(undefined); + expect(mockUnpublish).toHaveBeenCalledTimes(0); + expect(mockUnpublishOnIdle).toHaveBeenCalledTimes(0); }); - it('should warn when both id and facet are provided', async () => { - console.log = jest.fn(); + + it('should exit early if ID or query args are not passed', async () => { await handler({ - id: '1', - facet: 'label:test', - hubId: 'hub-id', + ...globalArgs, + id: CONTENT_ITEM_ID, + facet: 'mock-facet', logFile: mockLog - } as Arguments); - expect(console.log).toHaveBeenCalledWith('Please specify either a facet or an ID - not both.'); + }); + expect(mockAppendLine).toHaveBeenCalledWith('Please specify either a facet or an ID - not both'); + expect(mockUnpublish).toHaveBeenCalledTimes(0); + expect(mockUnpublishOnIdle).toHaveBeenCalledTimes(0); }); - it('should process items with valid inputs', async () => { + + it('should exit early if no content items', async () => { + const mockGetHub = jest.fn(); + (dynamicContentClientFactory as jest.Mock).mockReturnValue({ + hubs: { + get: mockGetHub.mockResolvedValue(new Hub({ id: HUB_ID })) + } + }); + (getContentByIds as unknown as jest.Mock).mockResolvedValue([]); await handler({ - hubId: 'hub-id', + ...globalArgs, + id: CONTENT_ITEM_ID, logFile: mockLog - } as Arguments); - expect(getItemsSpy).toHaveBeenCalled(); - expect(processSpy).toHaveBeenCalled(); + }); + expect(mockAppendLine).toHaveBeenCalledWith('Nothing found to unpublish, aborting'); + expect(mockUnpublish).toHaveBeenCalledTimes(0); + expect(mockUnpublishOnIdle).toHaveBeenCalledTimes(0); }); }); }); diff --git a/src/commands/content-item/unpublish.ts b/src/commands/content-item/unpublish.ts index 35824bd8..fd1f37e1 100644 --- 
a/src/commands/content-item/unpublish.ts +++ b/src/commands/content-item/unpublish.ts @@ -1,16 +1,16 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ import { Arguments, Argv } from 'yargs'; import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import PublishOptions from '../../common/publish/publish-options'; -import { ContentItem, ContentItemPublishingStatus, DynamicContent, Status } from 'dc-management-sdk-js'; +import { ContentItem, ContentItemPublishingStatus, Status } from 'dc-management-sdk-js'; import { getDefaultLogPath, createLog } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import { withOldFilters } from '../../common/filter/facet'; import { getContent } from '../../common/filter/fetch-content'; import { progressBar } from '../../common/progress-bar/progress-bar'; import { ContentItemUnpublishingService } from '../../common/publishing/content-item-unpublishing-service'; +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; export const command = 'unpublish [id]'; @@ -67,106 +67,20 @@ export const builder = (yargs: Argv): void => { }); }; -export const getContentItems = async ({ - client, - id, - hubId, - repoId, - folderId, - facet -}: { - client: DynamicContent; - id?: string | string[]; - hubId: string; - repoId?: string | string[]; - folderId?: string | string[]; - facet?: string; -}): Promise<{ contentItems: ContentItem[]; missingContent: boolean }> => { - try { - let contentItems: ContentItem[] = []; - - if (id != null) { - const itemIds = Array.isArray(id) ? id : [id]; - const items: ContentItem[] = []; - - for (const id of itemIds) { - try { - items.push(await client.contentItems.get(id)); - } catch { - // Missing item. 
- } - } - - contentItems.push(...items.filter(item => item.status === Status.ACTIVE)); - - return { - contentItems, - missingContent: contentItems.length != itemIds.length - }; - } - - const hub = await client.hubs.get(hubId); - - contentItems = await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); - - return { contentItems, missingContent: false }; - } catch (err) { - console.log(err); - - return { - contentItems: [], - missingContent: false - }; - } -}; - export const processItems = async ({ contentItems, - force, - silent, - logFile, - allContent, - missingContent + log }: { contentItems: ContentItem[]; - force?: boolean; - silent?: boolean; - logFile: FileLog; - allContent: boolean; - missingContent: boolean; + log: FileLog; }): Promise => { - if (contentItems.length == 0) { - console.log('Nothing found to unpublish, aborting.'); - return; - } - - const rootContentPublishedItems = contentItems.filter( - item => - item.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED && - item.publishingStatus !== ContentItemPublishingStatus.NONE - ); - - const log = logFile.open(); - log.appendLine(`Found ${rootContentPublishedItems.length} items to unpublish.`); - - if (rootContentPublishedItems.length === 0) { - return; - } - - if (!force) { - const yes = await confirmAllContent('unpublish', 'content items', allContent, missingContent); - if (!yes) { - return; - } - } - - log.appendLine(`Unpublishing ${rootContentPublishedItems.length} items.`); + log.appendLine(`Unpublishing ${contentItems.length} items.`); const unpublishingService = new ContentItemUnpublishingService(); const contentItemUnpublishJobs: ContentItem[] = []; - const unpublishProgress = progressBar(rootContentPublishedItems.length, 0, { title: 'Unpublishing content items' }); + const unpublishProgress = progressBar(contentItems.length, 0, { title: 'Unpublishing content items' }); - for (const item of rootContentPublishedItems) { + for (const item of contentItems) { try { await unpublishingService.unpublish(item, contentItem => { contentItemUnpublishJobs.push(contentItem); @@ -182,14 +96,11 @@ export const processItems = async ({ await unpublishingService.onIdle(); unpublishProgress.stop(); - - log.appendLine(`The request for content item/s to be unpublished has been completed - please manually verify.`); - - await log.close(!silent); }; export const handler = async (argv: Arguments): Promise => { const { id, logFile, force, silent, hubId, repoId, folderId } = argv; + const log = logFile.open(); const client = dynamicContentClientFactory(argv); const facet = withOldFilters(argv.facet, argv); @@ -197,37 +108,62 @@ export const handler = async (argv: Arguments 0 + ? await getContentByIds(client, ids) + : await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); + + const unpublishableContentItems = contentItems.filter( + item => + item.publishingStatus !== ContentItemPublishingStatus.UNPUBLISHED && + item.publishingStatus !== ContentItemPublishingStatus.NONE + ); + + if (!unpublishableContentItems.length) { + log.appendLine('Nothing found to unpublish, aborting'); + return; + } + + const missingContentItems = ids.length > 0 ? 
Boolean(ids.length !== unpublishableContentItems.length) : false; + + log.appendLine(`Found ${unpublishableContentItems.length} content items to unpublish\n`); + + if (!force) { + const yes = await confirmAllContent('unpublish', 'content items', allContent, missingContentItems); + if (!yes) { + return; + } + } await processItems({ - contentItems, - force, - silent, - logFile, - allContent, - missingContent + contentItems: unpublishableContentItems, + log }); + + log.appendLine(`Unpublish complete - please manually verify unpublish status`); + + await log.close(!silent); }; From 163cccb0685dc0692a19b8fd0ddb6132a25e7ca4 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 31 Oct 2025 09:55:49 +0000 Subject: [PATCH 37/43] feat: update archive to use shared content item fetch helpers --- src/commands/content-item/archive.spec.ts | 223 ++++++++-------------- src/commands/content-item/archive.ts | 176 ++++++----------- src/commands/content-item/publish.spec.ts | 28 +-- src/commands/content-item/publish.ts | 4 +- src/commands/content-item/sync.ts | 6 +- src/commands/content-item/unpublish.ts | 4 +- 6 files changed, 150 insertions(+), 291 deletions(-) diff --git a/src/commands/content-item/archive.spec.ts b/src/commands/content-item/archive.spec.ts index 39280974..9610f0e2 100644 --- a/src/commands/content-item/archive.spec.ts +++ b/src/commands/content-item/archive.spec.ts @@ -1,4 +1,4 @@ -import { builder, command, handler, LOG_FILENAME, getContentItems, processItems, coerceLog } from './archive'; +import { builder, command, handler, LOG_FILENAME, coerceLog } from './archive'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { ContentRepository, ContentItem, Folder, Status } from 'dc-management-sdk-js'; import Yargs from 'yargs/yargs'; @@ -6,7 +6,7 @@ import readline from 'readline'; import MockPage from '../../common/dc-management-sdk-js/mock-page'; import { dirname } from 'path'; import { promisify } from 'util'; -import { exists, readFile, unlink, mkdir, writeFile } from 'fs'; +import { readFile, unlink, mkdir, writeFile, existsSync } from 'fs'; import { FileLog, setVersion } from '../../common/file-log'; import { createLog, getDefaultLogPath } from '../../common/log-helpers'; import * as fetchContentModule from '../../common/filter/fetch-content'; @@ -51,7 +51,7 @@ describe('content-item archive command', () => { mockItemUpdate: () => void; mockRepoGet: () => void; mockFolderGet: () => void; - mockFacet: () => void; + mockGetContent: () => void; contentItems: ContentItem[]; } => { const mockGet = jest.fn(); @@ -62,7 +62,7 @@ describe('content-item archive command', () => { const mockItemUpdate = jest.fn(); const mockRepoGet = jest.fn(); const mockFolderGet = jest.fn(); - const mockFacet = jest.spyOn(fetchContentModule, 'getContent') as jest.Mock; + const mockGetContent = jest.spyOn(fetchContentModule, 'getContent') as jest.Mock; const contentItems = [ new ContentItem({ @@ -209,7 +209,7 @@ describe('content-item archive command', () => { mockItemsList.mockResolvedValue(new MockPage(ContentItem, contentItems)); - mockFacet.mockResolvedValue(contentItems); + mockGetContent.mockResolvedValue(contentItems); if (archiveError) { mockArchive.mockRejectedValue(new Error('Error')); @@ -226,7 +226,7 @@ describe('content-item archive command', () => { mockItemUpdate, mockRepoGet, mockFolderGet, - mockFacet, + mockGetContent, contentItems }; }; @@ -329,7 +329,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line 
@typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockFacet, mockArchive } = mockValues(); + const { mockGet, mockGetContent, mockArchive } = mockValues(); const argv = { ...yargArgs, @@ -338,7 +338,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, enrichItems: true }); @@ -349,7 +349,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockArchive, mockItemGetById, mockFacet } = mockValues(); + const { mockArchive, mockItemGetById, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -360,7 +360,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockItemGetById).toHaveBeenCalled(); - expect(mockFacet).not.toHaveBeenCalled(); + expect(mockGetContent).not.toHaveBeenCalled(); expect(mockArchive).toHaveBeenCalledTimes(1); }); @@ -368,7 +368,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockArchive, mockItemGetById, mockFacet } = mockValues(true); + const { mockArchive, mockItemGetById, mockGetContent } = mockValues(true); const argv = { ...yargArgs, @@ -378,7 +378,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockItemGetById).toHaveBeenCalled(); - expect(mockFacet).not.toHaveBeenCalled(); + expect(mockGetContent).not.toHaveBeenCalled(); expect(mockArchive).not.toHaveBeenCalled(); }); @@ -386,7 +386,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -396,7 +396,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, repoId: 'repo1', enrichItems: true @@ -408,7 +408,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -418,7 +418,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, repoId: ['repo1', 'repo2'], enrichItems: true @@ -430,7 +430,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { 
...yargArgs, @@ -441,7 +441,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, folderId: 'folder1', repoId: 'repo123', @@ -454,7 +454,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -464,7 +464,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, folderId: ['folder1', 'folder1'], enrichItems: true @@ -476,7 +476,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -487,7 +487,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { folderId: 'folder1', status: Status.ACTIVE, enrichItems: true @@ -499,7 +499,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockArchive, mockFolderGet, mockItemsList, mockFacet } = mockValues(); + const { mockArchive, mockFolderGet, mockItemsList, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -509,7 +509,7 @@ describe('content-item archive command', () => { }; await handler(argv); - expect(mockFacet).not.toHaveBeenCalled(); + expect(mockGetContent).not.toHaveBeenCalled(); expect(mockFolderGet).not.toHaveBeenCalled(); expect(mockItemsList).not.toHaveBeenCalled(); expect(mockArchive).not.toHaveBeenCalled(); @@ -519,10 +519,10 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); - (mockFacet as jest.Mock).mockReset(); - (mockFacet as jest.Mock).mockResolvedValue([]); + (mockGetContent as jest.Mock).mockReset(); + (mockGetContent as jest.Mock).mockResolvedValue([]); const argv = { ...yargArgs, @@ -533,7 +533,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item3', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item3', { folderId: 'folder1', status: Status.ACTIVE, enrichItems: true @@ -545,7 +545,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any 
(readline as any).setResponses(['n']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -556,7 +556,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { folderId: 'folder1', status: Status.ACTIVE, enrichItems: true @@ -568,7 +568,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -578,7 +578,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'schema:http://test.com', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'schema:http://test.com', { status: Status.ACTIVE, enrichItems: true }); @@ -589,7 +589,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(true); + const { mockGet, mockArchive, mockGetContent } = mockValues(true); const argv = { ...yargArgs, @@ -599,7 +599,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, enrichItems: true }); @@ -610,7 +610,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockArchive, mockFacet } = mockValues(true); + const { mockGet, mockArchive, mockGetContent } = mockValues(true); const argv = { ...yargArgs, @@ -620,7 +620,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, enrichItems: true }); @@ -631,7 +631,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['input', 'ignored']); - const { mockGet, mockArchive, mockFacet } = mockValues(); + const { mockGet, mockArchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -641,7 +641,7 @@ describe('content-item archive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ACTIVE, enrichItems: true }); @@ -656,7 +656,7 @@ describe('content-item archive command', () => { const log = '// Type log test file\n' + 'UNARCHIVE 1\n' + 'UNARCHIVE 2\n' + 'UNARCHIVE 
idMissing\n'; const dir = dirname(logFileName); - if (!(await promisify(exists)(dir))) { + if (!existsSync(dir)) { await promisify(mkdir)(dir); } await promisify(writeFile)(logFileName, log); @@ -683,23 +683,24 @@ describe('content-item archive command', () => { expect(mockArchive).toHaveBeenCalledTimes(2); }); - it("shouldn't archive content items, getFacet error", async () => { + it('should not archive content items when getContent throws an error', async () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['input', 'ignored']); - const { mockArchive, mockFacet } = mockValues(true); + const { mockArchive, mockGetContent } = mockValues(true); - (mockFacet as jest.Mock).mockReset(); - (mockFacet as jest.Mock).mockRejectedValue(new Error('Simulated Error')); + (mockGetContent as jest.Mock).mockReset(); + (mockGetContent as jest.Mock).mockRejectedValue(new Error('Simulated error')); const argv = { ...yargArgs, ...config, folderId: 'folder1' }; - await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + await expect(handler(argv)).rejects.toThrowErrorMatchingInlineSnapshot(`"Simulated error"`); + + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { folderId: 'folder1', status: Status.ACTIVE, enrichItems: true @@ -707,11 +708,11 @@ describe('content-item archive command', () => { expect(mockArchive).not.toHaveBeenCalled(); }); - it("shouldn't archive content items, revertLog error", async () => { + it('should not archive content items when revertLog does not exist', async () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - if (await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`)) { + if (existsSync(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`)) { await promisify(unlink)(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`); } @@ -719,12 +720,12 @@ describe('content-item archive command', () => { const log = '// Type log test file\n' + 'UNARCHIVE 1\n' + 'UNARCHIVE 2\n' + 'UNARCHIVE idMissing'; const dir = dirname(logFileName); - if (!(await promisify(exists)(dir))) { + if (!existsSync(dir)) { await promisify(mkdir)(dir); } await promisify(writeFile)(logFileName, log); - const { mockArchive, mockItemGetById, mockFacet } = mockValues(true); + const { mockArchive, mockItemGetById, mockGetContent } = mockValues(true); const argv = { ...yargArgs, @@ -733,10 +734,13 @@ describe('content-item archive command', () => { force: true, revertLog: 'wrongFileName.log' }; - await handler(argv); + + await expect(handler(argv)).rejects.toThrowErrorMatchingInlineSnapshot( + `"ENOENT: no such file or directory, open 'wrongFileName.log'"` + ); expect(mockItemGetById).not.toHaveBeenCalled(); - expect(mockFacet).not.toHaveBeenCalled(); + expect(mockGetContent).not.toHaveBeenCalled(); expect(mockArchive).not.toHaveBeenCalled(); }); @@ -744,7 +748,7 @@ describe('content-item archive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - if (await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`)) { + if (existsSync(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`)) { await promisify(unlink)(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`); } @@ -764,7 +768,7 @@ describe('content-item archive command', () => { 
expect(mockItemGetById).toHaveBeenCalled(); expect(mockArchive).toHaveBeenCalled(); - const logExists = await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`); + const logExists = existsSync(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`); expect(logExists).toBeTruthy(); @@ -842,104 +846,31 @@ describe('content-item archive command', () => { expect(mockItemUpdate).toHaveBeenCalledTimes(1); expect((mockItemUpdate as jest.Mock).mock.calls[0][1].ignoreSchemaValidation).toBe(true); }); - }); - describe('getContentItems tests', () => { - it('should get content items by id', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - id: '1', - hubId: 'hub1' - }); - - if (result) { - expect(result.contentItems.length).toBeGreaterThanOrEqual(1); - - expect(result.contentItems[0].id).toMatch('1'); - } - }); - - it('should get content items all', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - hubId: 'hub1' - }); - - if (result) { - expect(result.contentItems.length).toBe(2); - } - }); - - it('should get content items by repo', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - hubId: 'hub1', - repoId: 'repo1' - }); - - if (result) { - expect(result.contentItems.length).toBe(2); - } - }); - - it('should get content items by folder', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - hubId: 'hub1', - folderId: 'folder1' - }); - - if (result) { - expect(result.contentItems.length).toBe(2); - } - }); - }); - - describe('processItems tests', () => { - it('should archive content items', async () => { - const { contentItems, mockArchive } = mockValues(); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (readline as any).setResponses(['y']); - - await processItems({ - contentItems, - allContent: true, - missingContent: false, - logFile: createLog('./logFile.log') + it('should not archive content items', async () => { + const { mockGet, mockItemGetById, mockArchive } = mockValues(); + const logFile = new FileLog(); + const mockAppendFile = jest.fn(); + logFile.open = jest.fn().mockImplementation(() => { + return { + appendLine: mockAppendFile + }; }); + const argv = { + ...yargArgs, + ...config, + id: 'repo123', + logFile + }; - expect(mockArchive).toHaveBeenCalledTimes(2); - - if (await promisify(exists)('./logFile.log')) { - await promisify(unlink)('./logFile.log'); - } - }); - - it('should not archive content items', async () => { - jest.spyOn(global.console, 'log'); + (mockItemGetById as jest.Mock).mockResolvedValue([]); - await processItems({ - contentItems: [], - allContent: true, - missingContent: false, - logFile: new FileLog() - }); + await handler(argv); - expect(console.log).toHaveBeenCalled(); - expect(console.log).toHaveBeenLastCalledWith('Nothing found to archive, aborting.'); + expect(mockGet).toHaveBeenCalled(); + expect(mockArchive).not.toHaveBeenCalled(); + expect(mockAppendFile).toHaveBeenCalled(); + expect(mockAppendFile).toHaveBeenLastCalledWith('Nothing found to archive, aborting'); }); }); }); diff --git a/src/commands/content-item/archive.ts b/src/commands/content-item/archive.ts index a14bb4fa..8554bef2 100644 --- a/src/commands/content-item/archive.ts +++ b/src/commands/content-item/archive.ts @@ -4,12 +4,13 @@ 
import dynamicContentClientFactory from '../../services/dynamic-content-client-f import { ArchiveLog } from '../../common/archive/archive-log'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; import ArchiveOptions from '../../common/archive/archive-options'; -import { ContentItem, DynamicContent, Status } from 'dc-management-sdk-js'; +import { ContentItem, Status } from 'dc-management-sdk-js'; import { getDefaultLogPath, createLog } from '../../common/log-helpers'; import { FileLog } from '../../common/file-log'; import { withOldFilters } from '../../common/filter/facet'; import { getContent } from '../../common/filter/fetch-content'; import { progressBar } from '../../common/progress-bar/progress-bar'; +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; export const command = 'archive [id]'; @@ -86,108 +87,20 @@ export const builder = (yargs: Argv): void => { }); }; -export const getContentItems = async ({ - client, - id, - hubId, - repoId, - folderId, - revertLog, - facet -}: { - client: DynamicContent; - id?: string | string[]; - hubId: string; - repoId?: string | string[]; - folderId?: string | string[]; - revertLog?: string; - facet?: string; -}): Promise<{ contentItems: ContentItem[]; missingContent: boolean }> => { - try { - let contentItems: ContentItem[] = []; - - if (revertLog != null) { - const log = await new ArchiveLog().loadFromFile(revertLog); - id = log.getData('UNARCHIVE'); - } - - if (id != null) { - const itemIds = Array.isArray(id) ? id : [id]; - const items: ContentItem[] = []; - - for (const id of itemIds) { - try { - items.push(await client.contentItems.get(id)); - } catch { - // Missing item. - } - } - - contentItems.push(...items.filter(item => item.status === Status.ACTIVE)); - - return { - contentItems, - missingContent: contentItems.length != itemIds.length - }; - } - - const hub = await client.hubs.get(hubId); - - contentItems = await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); - - return { contentItems, missingContent: false }; - } catch (err) { - console.log(err); - - return { - contentItems: [], - missingContent: false - }; - } -}; - -export const processItems = async ({ +const processItems = async ({ contentItems, - force, - silent, - logFile, - allContent, - missingContent, + log, ignoreError, ignoreSchemaValidation }: { contentItems: ContentItem[]; - force?: boolean; - silent?: boolean; - logFile: FileLog; - allContent: boolean; - missingContent: boolean; + log: FileLog; ignoreError?: boolean; ignoreSchemaValidation?: boolean; -}): Promise => { - if (contentItems.length == 0) { - console.log('Nothing found to archive, aborting.'); - return; - } - - console.log('The following content items will be archived:'); - contentItems.forEach((contentItem: ContentItem) => { - console.log(` ${contentItem.label} (${contentItem.id})`); - }); - console.log(`Total: ${contentItems.length}`); - - if (!force) { - const yes = await confirmAllContent('archive', 'content item', allContent, missingContent); - if (!yes) { - return; - } - } - - const log = logFile.open(); - +}): Promise<{ failedArchives: ContentItem[] }> => { const progress = progressBar(contentItems.length, 0, { title: 'Archiving content items' }); + const failedArchives = []; - let successCount = 0; for (let i = 0; i < contentItems.length; i++) { try { const deliveryKey = contentItems[i].body._meta.deliveryKey; @@ -206,17 +119,17 @@ export const processItems = async ({ await 
contentItems[i].related.archive(); progress.increment(); log.addAction('ARCHIVE', `${args}`); - successCount++; } catch (e) { + failedArchives.push(contentItems[i]); progress.increment(); log.addComment(`ARCHIVE FAILED: ${contentItems[i].id}`); log.addComment(e.toString()); if (ignoreError) { - log.warn(`Failed to archive ${contentItems[i].label} (${contentItems[i].id}), continuing.`, e); + log.warn(`\nFailed to archive ${contentItems[i].label} (${contentItems[i].id}), continuing.`, e); } else { progress.stop(); - log.error(`Failed to archive ${contentItems[i].label} (${contentItems[i].id}), aborting.`, e); + log.error(`\nFailed to archive ${contentItems[i].label} (${contentItems[i].id}), aborting.`, e); break; } } @@ -224,56 +137,77 @@ export const processItems = async ({ progress.stop(); - await log.close(!silent); - - console.log(`Archived ${successCount} content items.`); + return { failedArchives }; }; export const handler = async (argv: Arguments): Promise => { const { id, logFile, force, silent, ignoreError, hubId, revertLog, repoId, folderId, ignoreSchemaValidation } = argv; + const log = logFile.open(); const client = dynamicContentClientFactory(argv); - const facet = withOldFilters(argv.facet, argv); - const allContent = !id && !facet && !revertLog && !folderId && !repoId; if (repoId && id) { - console.log('ID of content item is specified, ignoring repository ID'); + log.appendLine('ID of content item is specified, ignoring repository ID'); } if (id && facet) { - console.log('Please specify either a facet or an ID - not both.'); + log.appendLine('Please specify either a facet or an ID - not both.'); return; } if (repoId && folderId) { - console.log('Folder is specified, ignoring repository ID'); + log.appendLine('Folder is specified, ignoring repository ID'); } if (allContent) { - console.log('No filter was given, archiving all content'); + log.appendLine('No filter was given, archiving all content'); } - const { contentItems, missingContent } = await getContentItems({ - client, - id, - hubId, - repoId, - folderId, - revertLog, - facet - }); + let ids: string[] = []; + + if (id) { + ids = Array.isArray(id) ? id : [id]; + } + + if (revertLog) { + const log = await new ArchiveLog().loadFromFile(revertLog); + ids = log.getData('UNARCHIVE'); + } + + const hub = await client.hubs.get(hubId); + const contentItems = ids.length + ? (await getContentByIds(client, ids)).filter(item => item.status === Status.ACTIVE) + : await getContent(client, hub, facet, { repoId, folderId, status: Status.ACTIVE, enrichItems: true }); - await processItems({ + if (!contentItems.length) { + log.appendLine('Nothing found to archive, aborting'); + return; + } + + const missingContentItems = ids.length > 0 ? Boolean(ids.length !== contentItems.length) : false; + log.appendLine(`Found ${contentItems.length} content items to archive`); + + if (!force) { + const yes = await confirmAllContent('archive', 'content item', allContent, missingContentItems); + if (!yes) { + return; + } + } + + const { failedArchives } = await processItems({ contentItems, - force, - silent, - logFile, - allContent, - missingContent, + log, ignoreError, ignoreSchemaValidation }); + + const failedArchiveMsg = failedArchives.length + ? 
`with ${failedArchives.length} failed archives - check logs for details` + : ``; + log.appendLine(`Archived content items ${failedArchiveMsg}`); + + await log.close(!silent); }; // log format: diff --git a/src/commands/content-item/publish.spec.ts b/src/commands/content-item/publish.spec.ts index 70df92e2..a310d18a 100644 --- a/src/commands/content-item/publish.spec.ts +++ b/src/commands/content-item/publish.spec.ts @@ -254,10 +254,10 @@ describe('publish tests', () => { id: CONTENT_ITEM_ID, logFile: mockLog }); - expect(mockPublish).toHaveBeenCalledTimes(0); - expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); - expect(mockCheck).toHaveBeenCalledTimes(0); - expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); + expect(mockPublish).not.toHaveBeenCalled(); + expect(mockPublishOnIdle).not.toHaveBeenCalled(); + expect(mockCheck).not.toHaveBeenCalled(); + expect(mockCheckOnIdle).not.toHaveBeenCalled(); }); it('should not check publishing jobs if check question is rejected', async () => { @@ -284,8 +284,8 @@ describe('publish tests', () => { expect(mockPublish).toHaveBeenCalledTimes(1); expect(mockPublishOnIdle).toHaveBeenCalledTimes(1); - expect(mockCheck).toHaveBeenCalledTimes(0); - expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); + expect(mockCheck).not.toHaveBeenCalled(); + expect(mockCheckOnIdle).not.toHaveBeenCalled(); }); it('should exit early if ID or query args are not passed', async () => { @@ -296,10 +296,10 @@ describe('publish tests', () => { logFile: mockLog }); expect(mockAppendLine).toHaveBeenCalledWith('Please specify either a facet or an ID - not both'); - expect(mockPublish).toHaveBeenCalledTimes(0); - expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); - expect(mockCheck).toHaveBeenCalledTimes(0); - expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); + expect(mockPublish).not.toHaveBeenCalled(); + expect(mockPublishOnIdle).not.toHaveBeenCalled(); + expect(mockCheck).not.toHaveBeenCalled(); + expect(mockCheckOnIdle).not.toHaveBeenCalled(); }); it('should exit early if no content items', async () => { @@ -316,10 +316,10 @@ describe('publish tests', () => { logFile: mockLog }); expect(mockAppendLine).toHaveBeenCalledWith('Nothing found to publish, aborting'); - expect(mockPublish).toHaveBeenCalledTimes(0); - expect(mockPublishOnIdle).toHaveBeenCalledTimes(0); - expect(mockCheck).toHaveBeenCalledTimes(0); - expect(mockCheckOnIdle).toHaveBeenCalledTimes(0); + expect(mockPublish).not.toHaveBeenCalled(); + expect(mockPublishOnIdle).not.toHaveBeenCalled(); + expect(mockCheck).not.toHaveBeenCalled(); + expect(mockCheckOnIdle).not.toHaveBeenCalled(); }); }); }); diff --git a/src/commands/content-item/publish.ts b/src/commands/content-item/publish.ts index 78ab1738..ffc58820 100644 --- a/src/commands/content-item/publish.ts +++ b/src/commands/content-item/publish.ts @@ -139,9 +139,7 @@ export const handler = async (argv: Arguments 0 ? await getContentByIds(client, ids) diff --git a/src/commands/content-item/sync.ts b/src/commands/content-item/sync.ts index 94d66b74..86007132 100644 --- a/src/commands/content-item/sync.ts +++ b/src/commands/content-item/sync.ts @@ -83,8 +83,8 @@ export const handler = async (argv: Arguments 0 ? await getContentByIds(client, ids) diff --git a/src/commands/content-item/unpublish.ts b/src/commands/content-item/unpublish.ts index fd1f37e1..e02cf89a 100644 --- a/src/commands/content-item/unpublish.ts +++ b/src/commands/content-item/unpublish.ts @@ -102,9 +102,7 @@ export const handler = async (argv: Arguments 0 ? 
await getContentByIds(client, ids)

From 3d961d71d95736095a6988fe49e58c25746ad027 Mon Sep 17 00:00:00 2001
From: Ben Pearey
Date: Fri, 31 Oct 2025 15:23:22 +0000
Subject: [PATCH 38/43] docs: adding content item sync documentation

---
 docs/CONTENT-ITEM.md | 65 +++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 61 insertions(+), 4 deletions(-)

diff --git a/docs/CONTENT-ITEM.md b/docs/CONTENT-ITEM.md
index 0229bbd3..a597d88d 100644
--- a/docs/CONTENT-ITEM.md
+++ b/docs/CONTENT-ITEM.md
@@ -29,6 +29,7 @@ Return to [README.md](../README.md) for information on other command categories.
   - [tree](#tree)
   - [publish](#publish)
   - [unpublish](#unpublish)
+  - [sync] (#sync)

@@ -416,13 +417,13 @@ If no `id` is provided, all content items in all content repositories in the spe
 ##### Publish a specific content item by ID

 ```bash
-dc-cli content-item publish 1234abcd
+dc-cli content-item publish ba967c23-4c22-4617-a009-0f976d77b81c
 ```

 ##### Publish all content in a specific repository

 ```bash
-dc-cli content-item publish --repoId your-repo-id
+dc-cli content-item publish --repoId 67d1c1cf642fa239dbe15165
 ```

 ##### Use facets to publish filtered content
@@ -469,13 +470,13 @@ If no `id` is provided, all content items in all content repositories in the spe
 ##### Unpublish a specific content item by ID

 ```bash
-dc-cli content-item unpublish 1234abcd
+dc-cli content-item unpublish ba967c23-4c22-4617-a009-0f976d77b81c
 ```

 ##### Unpublish all content in a specific repository

 ```bash
-dc-cli content-item unpublish --repoId your-repo-id
+dc-cli content-item unpublish --repoId 67d1c1cf642fa239dbe15165
 ```

 ##### Use facets to unpublish filtered content
@@ -483,3 +484,59 @@ dc-cli content-item unpublish --repoId your-repo-id
 ```bash
 dc-cli content-item unpublish --facet "locale:en-GB,label:homepage"
 ```
+
+### sync
+
+> **_NOTE:_** Repository mapping must be set up before using this command
+
+Sync content items between content hubs. You can sync all items or specify individual content items by ID.
+
+```bash
+dc-cli content-item sync [id]
+```
+
+If no `id` is provided, all content items in all content repositories in the specified hub will be synced.
+
+---
+
+#### Positionals
+
+| Argument | Description |
+| -------- | --------------------------------------------------------------------------------------------------------------------------- |
+| `id` | The ID of a content item to be synced. If omitted, all content items in all repositories will be synced. _(Optional)_ |
+
+---
+
+#### Options
+
+| Option | Alias | Description |
+| -------------------- | ----- | -------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `--destinationHubId` | | The destination hub ID to sync the content item with |
+| `--repoId` | | The ID of a content repository to restrict sync scope. _(Optional)_ |
+| `--folderId` | | The ID of a folder to restrict sync scope. _(Optional)_ |
+| `--facet` | | Filter content using facets. Format:
`label:example name,locale:en-GB`
Regex supported with `/pattern/`.
See README for more examples. | +| `-f`, `--force` | | Skip confirmation prompts before sync. | +| `-s`, `--silent` | | Disable log file creation. | +| `--logFile` | | Path to write the log file.
Default: `(log_filename)` | + +--- + +#### Examples + +##### Sync a specific content item by ID + +```bash +dc-cli content-item sync ba967c23-4c22-4617-a009-0f976d77b81c +``` + +##### Sync all content in a specific repository + +```bash +dc-cli content-item sync --repoId 67d1c1cf642fa239dbe15165 +``` + +##### Use facets to sync filtered content + +```bash +dc-cli content-item sync --facet "locale:en-GB,label:homepage" +``` From c1ccaa245dd66b118820a74acac8e8c1562a8238 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 31 Oct 2025 18:16:14 +0000 Subject: [PATCH 39/43] feat: update unarchive to use reusable get content and refactor internal functions --- src/commands/content-item/archive.ts | 1 + src/commands/content-item/unarchive.spec.ts | 255 +++++++----------- src/commands/content-item/unarchive.ts | 251 ++++++++--------- .../archive/content-item-unarchive-options.ts | 15 ++ 4 files changed, 234 insertions(+), 288 deletions(-) create mode 100644 src/common/archive/content-item-unarchive-options.ts diff --git a/src/commands/content-item/archive.ts b/src/commands/content-item/archive.ts index 8554bef2..891c5aa9 100644 --- a/src/commands/content-item/archive.ts +++ b/src/commands/content-item/archive.ts @@ -205,6 +205,7 @@ export const handler = async (argv: Arguments ({ + ...jest.requireActual('../../common/log-helpers'), + getDefaultLogPath: jest.fn() +})); + jest.mock('../../common/filter/fetch-content'); describe('content-item unarchive command', () => { @@ -27,7 +36,8 @@ describe('content-item unarchive command', () => { const config = { clientId: 'client-id', clientSecret: 'client-id', - hubId: 'hub-id' + hubId: 'hub-id', + logFile: new FileLog() }; const mockValues = ( @@ -41,7 +51,7 @@ describe('content-item unarchive command', () => { mockItemGetById: () => void; mockRepoGet: () => void; mockFolderGet: () => void; - mockFacet: () => void; + mockGetContent: () => void; contentItems: ContentItem[]; } => { const mockGet = jest.fn(); @@ -52,7 +62,7 @@ describe('content-item unarchive command', () => { const mockItemGetById = jest.fn(); const mockRepoGet = jest.fn(); const mockFolderGet = jest.fn(); - const mockFacet = jest.spyOn(fetchContentModule, 'getContent') as jest.Mock; + const mockGetContent = jest.spyOn(fetchContentModule, 'getContent') as jest.Mock; const item = new ContentItem({ id: '1', @@ -196,7 +206,7 @@ describe('content-item unarchive command', () => { mockItemUpdate.mockResolvedValue(item); mockItemsList.mockResolvedValue(new MockPage(ContentItem, contentItems)); - mockFacet.mockResolvedValue(contentItems); + mockGetContent.mockResolvedValue(contentItems); if (unarchiveError) { mockUnarchive.mockRejectedValue(new Error('Error')); @@ -213,7 +223,7 @@ describe('content-item unarchive command', () => { mockItemGetById, mockRepoGet, mockFolderGet, - mockFacet, + mockGetContent, contentItems }; }; @@ -282,7 +292,8 @@ describe('content-item unarchive command', () => { expect(spyOption).toHaveBeenCalledWith('logFile', { type: 'string', default: LOG_FILENAME, - describe: 'Path to a log file to write to.' 
+ describe: 'Path to a log file to write to.', + coerce: coerceLog }); expect(spyOption).toHaveBeenCalledWith('name', { @@ -308,11 +319,24 @@ describe('content-item unarchive command', () => { jest.clearAllMocks(); }); + it('should use getDefaultLogPath for LOG_FILENAME with process.platform as default', function () { + LOG_FILENAME(); + + expect(getDefaultLogPath).toHaveBeenCalledWith('content-item', 'unarchive', process.platform); + }); + + it('should generate a log with coerceLog with the appropriate title', function () { + const logFile = coerceLog('filename.log'); + + expect(logFile).toEqual(expect.any(FileLog)); + expect(logFile.title).toMatch(/^dc\-cli test\-ver \- Content Items Unarchive Log \- ./); + }); + it('should unarchive all content', async () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockGet, mockFacet, mockUnarchive } = mockValues(); + const { mockGet, mockGetContent, mockUnarchive } = mockValues(); const argv = { ...yargArgs, @@ -321,7 +345,7 @@ describe('content-item unarchive command', () => { await handler(argv); expect(mockGet).toHaveBeenCalled(); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); expect(mockUnarchive).toHaveBeenCalledTimes(2); @@ -366,7 +390,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -375,7 +399,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { repoId: 'repo1', status: Status.ARCHIVED }); @@ -386,7 +410,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -395,7 +419,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { repoId: ['repo1', 'repo2'], status: Status.ARCHIVED }); @@ -406,7 +430,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -416,7 +440,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { repoId: 'repo123', folderId: 'folder1', status: Status.ARCHIVED @@ -428,7 +452,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as 
any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -437,7 +461,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { folderId: ['folder1', 'folder1'], status: Status.ARCHIVED }); @@ -448,7 +472,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -458,7 +482,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { folderId: 'folder1', status: Status.ARCHIVED }); @@ -469,7 +493,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFolderGet, mockItemsList, mockFacet } = mockValues(); + const { mockUnarchive, mockFolderGet, mockItemsList, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -479,7 +503,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).not.toHaveBeenCalled(); + expect(mockGetContent).not.toHaveBeenCalled(); expect(mockFolderGet).not.toHaveBeenCalled(); expect(mockItemsList).not.toHaveBeenCalled(); expect(mockUnarchive).not.toHaveBeenCalled(); @@ -489,10 +513,10 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); - (mockFacet as jest.Mock).mockReset(); - (mockFacet as jest.Mock).mockResolvedValue([]); + (mockGetContent as jest.Mock).mockReset(); + (mockGetContent as jest.Mock).mockResolvedValue([]); const argv = { ...yargArgs, @@ -502,7 +526,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item3', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item3', { folderId: 'folder1', status: Status.ARCHIVED }); @@ -513,7 +537,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['n']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -523,7 +547,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'name:item1', { folderId: 'folder1', status: Status.ARCHIVED }); @@ -534,7 +558,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { 
mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -543,7 +567,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'schema:http://test.com', { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), 'schema:http://test.com', { status: Status.ARCHIVED }); expect(mockUnarchive).toHaveBeenCalledTimes(2); @@ -553,7 +577,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(true); + const { mockUnarchive, mockGetContent } = mockValues(true); const argv = { ...yargArgs, @@ -562,7 +586,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); expect(mockUnarchive).toHaveBeenCalledTimes(2); @@ -572,7 +596,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - const { mockUnarchive, mockFacet } = mockValues(true); + const { mockUnarchive, mockGetContent } = mockValues(true); const argv = { ...yargArgs, @@ -581,7 +605,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); expect(mockUnarchive).toHaveBeenCalledTimes(1); @@ -591,7 +615,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['input', 'ignored']); - const { mockUnarchive, mockFacet } = mockValues(); + const { mockUnarchive, mockGetContent } = mockValues(); const argv = { ...yargArgs, @@ -600,7 +624,7 @@ describe('content-item unarchive command', () => { }; await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { status: Status.ARCHIVED }); expect(mockUnarchive).toHaveBeenCalledTimes(2); @@ -614,7 +638,7 @@ describe('content-item unarchive command', () => { const log = '// Type log test file\n' + 'ARCHIVE 1\n' + 'ARCHIVE 2 delivery-key\n' + 'ARCHIVE idMissing\n'; const dir = dirname(logFileName); - if (!(await promisify(exists)(dir))) { + if (!existsSync(dir)) { await promisify(mkdir)(dir); } await promisify(writeFile)(logFileName, log); @@ -629,7 +653,6 @@ describe('content-item unarchive command', () => { const argv = { ...yargArgs, ...config, - logFile: LOG_FILENAME(), silent: true, force: true, revertLog: logFileName @@ -657,7 +680,7 @@ describe('content-item unarchive command', () => { 'ARCHIVE idMissing\n'; const dir = dirname(logFileName); - if (!(await promisify(exists)(dir))) { + if (!existsSync(dir)) { await promisify(mkdir)(dir); } await promisify(writeFile)(logFileName, log); @@ -672,7 +695,6 @@ describe('content-item unarchive command', () => { const argv = { ...yargArgs, ...config, - logFile: LOG_FILENAME(), silent: true, 
force: true, revertLog: logFileName @@ -702,7 +724,7 @@ describe('content-item unarchive command', () => { 'ARCHIVE idMissing\n'; const dir = dirname(logFileName); - if (!(await promisify(exists)(dir))) { + if (!existsSync(dir)) { await promisify(mkdir)(dir); } await promisify(writeFile)(logFileName, log); @@ -717,7 +739,6 @@ describe('content-item unarchive command', () => { const argv = { ...yargArgs, ...config, - logFile: LOG_FILENAME(), silent: true, force: true, revertLog: logFileName @@ -740,19 +761,20 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['input', 'ignored']); - const { mockFacet, mockUnarchive } = mockValues(true); + const { mockGetContent, mockUnarchive } = mockValues(true); - (mockFacet as jest.Mock).mockReset(); - (mockFacet as jest.Mock).mockRejectedValue(new Error('Simulated Error')); + (mockGetContent as jest.Mock).mockReset(); + (mockGetContent as jest.Mock).mockRejectedValue(new Error('Simulated error')); const argv = { ...yargArgs, ...config, folderId: 'folder1' }; - await handler(argv); - expect(mockFacet).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { + await expect(handler(argv)).rejects.toThrowErrorMatchingInlineSnapshot(`"Simulated error"`); + + expect(mockGetContent).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), undefined, { folderId: 'folder1', status: Status.ARCHIVED }); @@ -763,7 +785,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - if (await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`)) { + if (existsSync(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`)) { await promisify(unlink)(`temp_${process.env.JEST_WORKER_ID}/content-item-archive.log`); } @@ -771,25 +793,26 @@ describe('content-item unarchive command', () => { const log = '// Type log test file\n' + 'ARCHIVE 1\n' + 'ARCHIVE 2\n' + 'ARCHIVE idMissing'; const dir = dirname(logFileName); - if (!(await promisify(exists)(dir))) { + if (!existsSync(dir)) { await promisify(mkdir)(dir); } await promisify(writeFile)(logFileName, log); - const { mockUnarchive, mockItemGetById, mockFacet } = mockValues(true); + const { mockUnarchive, mockItemGetById, mockGetContent } = mockValues(true); const argv = { ...yargArgs, ...config, - logFile: LOG_FILENAME(), silent: true, force: true, revertLog: 'wrongFileName.log' }; - await handler(argv); + await expect(handler(argv)).rejects.toThrowErrorMatchingInlineSnapshot( + `"ENOENT: no such file or directory, open 'wrongFileName.log'"` + ); expect(mockItemGetById).not.toHaveBeenCalled(); - expect(mockFacet).not.toHaveBeenCalled(); + expect(mockGetContent).not.toHaveBeenCalled(); expect(mockUnarchive).not.toHaveBeenCalled(); }); @@ -797,7 +820,7 @@ describe('content-item unarchive command', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (readline as any).setResponses(['y']); - if (await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`)) { + if (existsSync(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`)) { await promisify(unlink)(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`); } @@ -806,7 +829,7 @@ describe('content-item unarchive command', () => { const argv = { ...yargArgs, ...config, - logFile: `temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`, + logFile: 
createLog(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`), id: '1' }; @@ -815,7 +838,7 @@ describe('content-item unarchive command', () => { expect(mockItemGetById).toHaveBeenCalled(); expect(mockUnarchive).toHaveBeenCalled(); - const logExists = await promisify(exists)(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`); + const logExists = existsSync(`temp_${process.env.JEST_WORKER_ID}/content-item-unarchive.log`); expect(logExists).toBeTruthy(); @@ -842,7 +865,7 @@ describe('content-item unarchive command', () => { const log = '// Type log test file\n' + 'ARCHIVE 1 delivery-key\n'; const dir = dirname(logFileName); - if (!(await promisify(exists)(dir))) { + if (!existsSync(dir)) { await promisify(mkdir)(dir); } await promisify(writeFile)(logFileName, log); @@ -879,7 +902,6 @@ describe('content-item unarchive command', () => { const argv = { ...yargArgs, ...config, - logFile: LOG_FILENAME(), silent: true, force: true, revertLog: logFileName, @@ -891,109 +913,30 @@ describe('content-item unarchive command', () => { expect(mockItemUpdate).toHaveBeenCalledTimes(1); expect((mockItemUpdate as jest.Mock).mock.calls[0][1].ignoreSchemaValidation).toBe(true); }); - }); - - describe('getContentItems tests', () => { - beforeEach(() => { - const { mockItemGetById, contentItems } = mockValues(); - - (mockItemGetById as jest.Mock).mockReset(); - (mockItemGetById as jest.Mock).mockResolvedValueOnce(contentItems[0]); - }); - it('should get content items by id', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - id: '1', - hubId: 'hub1' - }); - - if (result) { - expect(result.contentItems.length).toBeGreaterThanOrEqual(1); - - expect(result.contentItems[0].id).toMatch('1'); - } - }); - - it('should get content items all', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - hubId: 'hub1' - }); - - if (result) { - expect(result.contentItems.length).toBe(2); - } - }); - - it('should get content items by repo', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - hubId: 'hub1', - repoId: 'repo1' - }); - - if (result) { - expect(result.contentItems.length).toBe(2); - } - }); - - it('should get content items by folder', async () => { - const result = await getContentItems({ - client: dynamicContentClientFactory({ - ...config, - ...yargArgs - }), - hubId: 'hub1', - folderId: 'folder1' - }); - if (result) { - expect(result.contentItems.length).toBe(2); - } - }); - }); - - describe('processItems tests', () => { - it('should unarchive content items', async () => { - const { contentItems, mockUnarchive } = mockValues(); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (readline as any).setResponses(['y']); - - await processItems({ - contentItems, - allContent: true, - missingContent: false, - logFile: './logFile.log' + it('should not archive content items', async () => { + const { mockItemGetById, mockUnarchive } = mockValues(); + const logFile = new FileLog(); + const mockAppendFile = jest.fn(); + logFile.open = jest.fn().mockImplementation(() => { + return { + appendLine: mockAppendFile + }; }); + const argv = { + ...yargArgs, + ...config, + id: 'repo123', + logFile + }; - expect(mockUnarchive).toHaveBeenCalledTimes(2); - - if (await promisify(exists)('./logFile.log')) { - await promisify(unlink)('./logFile.log'); - } - }); - - 
it('should not unarchive content items', async () => { - jest.spyOn(global.console, 'log'); + (mockItemGetById as jest.Mock).mockResolvedValue([]); - await processItems({ - contentItems: [], - allContent: true, - missingContent: false - }); + await handler(argv); - expect(console.log).toHaveBeenCalled(); - expect(console.log).toHaveBeenLastCalledWith('Nothing found to unarchive, aborting.'); + expect(mockUnarchive).not.toHaveBeenCalled(); + expect(mockAppendFile).toHaveBeenCalled(); + expect(mockAppendFile).toHaveBeenLastCalledWith('Nothing found to unarchive, aborting'); }); }); }); diff --git a/src/commands/content-item/unarchive.ts b/src/commands/content-item/unarchive.ts index 2b58d8db..4f032dd2 100644 --- a/src/commands/content-item/unarchive.ts +++ b/src/commands/content-item/unarchive.ts @@ -3,12 +3,15 @@ import { ConfigurationParameters } from '../configure'; import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; import { ArchiveLog } from '../../common/archive/archive-log'; import { confirmAllContent } from '../../common/content-item/confirm-all-content'; -import UnarchiveOptions from '../../common/archive/unarchive-options'; import { ContentItem, DynamicContent, Status } from 'dc-management-sdk-js'; -import { getDefaultLogPath } from '../../common/log-helpers'; -import { withOldFilters } from '../../common/filter/facet'; +import { createLog, getDefaultLogPath } from '../../common/log-helpers'; +import { Facet, withOldFilters } from '../../common/filter/facet'; import { getContent } from '../../common/filter/fetch-content'; import { isEqual } from 'lodash'; +import { getContentByIds } from '../../common/content-item/get-content-items-by-ids'; +import { FileLog } from '../../common/file-log'; +import { progressBar } from '../../common/progress-bar/progress-bar'; +import ContentItemUnarchiveOptions from '../../common/archive/content-item-unarchive-options'; export const command = 'unarchive [id]'; @@ -17,6 +20,8 @@ export const desc = 'Unarchive Content Items'; export const LOG_FILENAME = (platform: string = process.platform): string => getDefaultLogPath('content-item', 'unarchive', platform); +export const coerceLog = (logFile: string): FileLog => createLog(logFile, 'Content Items Unarchive Log'); + export const builder = (yargs: Argv): void => { yargs .positional('id', { @@ -65,7 +70,8 @@ export const builder = (yargs: Argv): void => { .option('logFile', { type: 'string', default: LOG_FILENAME, - describe: 'Path to a log file to write to.' + describe: 'Path to a log file to write to.', + coerce: coerceLog }) .option('name', { type: 'string', @@ -82,126 +88,78 @@ export const builder = (yargs: Argv): void => { }); }; -export const getContentItems = async ({ +const getContentToUnarchiveWithIds = async ({ client, - id, - hubId, - repoId, - folderId, - revertLog, - facet + ids, + revertItems }: { client: DynamicContent; - id?: string | string[]; - hubId: string; - repoId?: string | string[]; - folderId?: string | string[]; - revertLog?: string; - facet?: string; -}): Promise<{ contentItems: ContentItem[]; missingContent: boolean }> => { - try { - let contentItems: ContentItem[] = []; - let revertItems: string[][] = []; - - if (revertLog != null) { - const log = await new ArchiveLog().loadFromFile(revertLog); - revertItems = log.getData('ARCHIVE').map(args => args.split(' ')); - id = revertItems.map(item => item[0]); - } + ids: string[]; + revertItems?: string[][]; +}) => { + let contentItemIds = ids; - if (id) { - const itemIds = Array.isArray(id) ? 
id : [id]; - const items: ContentItem[] = []; - - for (let i = 0; i < itemIds.length; i++) { - try { - const contentItem = await client.contentItems.get(itemIds[i]); - - if (revertItems.length === itemIds.length) { - contentItem.body._meta.deliveryKey = revertItems[i][1] || null; - const archivedDeliveryKeys: string[] = revertItems[i][2] ? revertItems[i][2]?.split(',') : []; - if (archivedDeliveryKeys?.length) { - contentItem.body._meta.deliveryKeys = { - values: archivedDeliveryKeys.map(deliveryKey => ({ value: deliveryKey })) - }; - } - } - items.push(contentItem); - } catch { - // Missing item. - } - } - - contentItems.push(...items.filter(item => item.status === Status.ARCHIVED)); + if (revertItems?.length) { + contentItemIds = revertItems.map(item => item[0]); + } - return { - contentItems, - missingContent: contentItems.length != itemIds.length - }; + const contentItems = await getContentByIds(client, contentItemIds); + const contentItemsWithRevert = contentItems.map(item => { + const revertItem = revertItems?.find(revertItem => item.id === revertItem[0]); + if (revertItem) { + const [, key, keys] = revertItem; + const deliveryKeys = keys?.split(',') || []; + item.body._meta.deliveryKey = key || null; + if (keys?.length) { + item.body._meta.deliveryKeys = { + values: deliveryKeys.map(k => ({ value: k })) + }; + } } + return item; + }); - const hub = await client.hubs.get(hubId); - - contentItems = await getContent(client, hub, facet, { repoId, folderId, status: Status.ARCHIVED }); - - // Delete the delivery keys, as the unarchive will attempt to reassign them if present. - contentItems.forEach(item => { - delete item.body._meta.deliveryKey; - delete item.body._meta.deliveryKeys; - }); + return contentItemsWithRevert.filter(item => item.status === Status.ARCHIVED); +}; - return { contentItems, missingContent: false }; - } catch (err) { - console.log(err); +const getContentToUnarchiveWithFacet = async ({ + client, + hubId, + facet, + repoId, + folderId +}: { + client: DynamicContent; + hubId: string; + facet?: Facet | string | undefined; + repoId?: string | string[]; + folderId?: string | string[]; +}) => { + const hub = await client.hubs.get(hubId); + const contentItems = await getContent(client, hub, facet, { repoId, folderId, status: Status.ARCHIVED }); + + // Delete the delivery keys, as the unarchive will attempt to reassign them if present. 
+ contentItems.forEach(item => { + delete item.body._meta.deliveryKey; + delete item.body._meta.deliveryKeys; + }); - return { - contentItems: [], - missingContent: false - }; - } + return contentItems; }; -export const processItems = async ({ +const processItems = async ({ contentItems, - force, - silent, - logFile, - allContent, - missingContent, + log, ignoreError, ignoreSchemaValidation }: { contentItems: ContentItem[]; - force?: boolean; - silent?: boolean; - logFile?: string; - allContent: boolean; - missingContent: boolean; + log: FileLog; ignoreError?: boolean; ignoreSchemaValidation?: boolean; -}): Promise => { - if (contentItems.length == 0) { - console.log('Nothing found to unarchive, aborting.'); - return; - } - - console.log('The following content items will be unarchived:'); - contentItems.forEach((contentItem: ContentItem) => { - console.log(` ${contentItem.label} (${contentItem.id})`); - }); - console.log(`Total: ${contentItems.length}`); - - if (!force) { - const yes = await confirmAllContent('unarchive', 'content item', allContent, missingContent); - if (!yes) { - return; - } - } - - const timestamp = Date.now().toString(); - const log = new ArchiveLog(`Content Items Unarchive Log - ${timestamp}\n`); - - let successCount = 0; +}): Promise<{ failedUnarchives: ContentItem[] }> => { + const progress = progressBar(contentItems.length, 0, { title: 'Unarchiving content items' }); + const failedUnarchives: ContentItem[] = []; for (let i = 0; i < contentItems.length; i++) { try { @@ -221,71 +179,100 @@ export const processItems = async ({ } log.addAction('UNARCHIVE', `${contentItems[i].id}\n`); - successCount++; + progress.increment(); } catch (e) { + failedUnarchives.push(contentItems[i]); + progress.increment(); log.addComment(`UNARCHIVE FAILED: ${contentItems[i].id}`); log.addComment(e.toString()); if (ignoreError) { log.warn(`Failed to unarchive ${contentItems[i].label} (${contentItems[i].id}), continuing.`, e); } else { + progress.stop(); log.error(`Failed to unarchive ${contentItems[i].label} (${contentItems[i].id}), aborting.`, e); break; } } } - if (!silent && logFile) { - await log.writeToFile(logFile.replace('', timestamp)); - } + progress.stop(); - console.log(`Unarchived ${successCount} content items.`); + return { failedUnarchives }; }; -export const handler = async (argv: Arguments): Promise => { +export const handler = async ( + argv: Arguments +): Promise => { const { id, logFile, force, silent, ignoreError, hubId, revertLog, repoId, folderId, ignoreSchemaValidation } = argv; + const log = logFile.open(); const facet = withOldFilters(argv.facet, argv); const client = dynamicContentClientFactory(argv); - const allContent = !id && !facet && !revertLog && !folderId && !repoId; if (repoId && id) { - console.log('ID of content item is specified, ignoring repository ID'); + log.appendLine('ID of content item is specified, ignoring repository ID'); } if (id && facet) { - console.log('Please specify either a facet or an ID - not both.'); + log.appendLine('Please specify either a facet or an ID - not both.'); return; } if (repoId && folderId) { - console.log('Folder is specified, ignoring repository ID'); + log.appendLine('Folder is specified, ignoring repository ID'); } if (allContent) { - console.log('No filter was given, archiving all content'); + log.appendLine('No filter was given, archiving all content'); } - const { contentItems, missingContent } = await getContentItems({ - client, - id, - hubId, - repoId, - folderId, - revertLog, - facet - }); + let ids: string[] = 
[];
+  let revertItems: string[][] = [];
+
+  if (id) {
+    ids = Array.isArray(id) ? id : [id];
+  }
+
+  if (revertLog) {
+    const log = await new ArchiveLog().loadFromFile(revertLog);
+    revertItems = log.getData('ARCHIVE').map(args => args.split(' '));
+    ids = revertItems.map(item => item[0]);
+  }

-  await processItems({
+  const contentItems = ids.length
+    ? await getContentToUnarchiveWithIds({ client, ids, revertItems })
+    : await getContentToUnarchiveWithFacet({ client, hubId, facet, repoId, folderId });
+
+  if (!contentItems.length) {
+    log.appendLine('Nothing found to unarchive, aborting');
+    return;
+  }
+
+  const missingContentItems = ids.length > 0 ? Boolean(ids.length !== contentItems.length) : false;
+  logFile.appendLine(`Found ${contentItems.length} content items to unarchive`);
+
+  if (!force) {
+    const yes = await confirmAllContent('unarchive', 'content item', allContent, missingContentItems);
+    if (!yes) {
+      return;
+    }
+  }
+
+  const { failedUnarchives } = await processItems({
     contentItems,
-    force,
-    silent,
-    logFile,
-    allContent,
-    missingContent,
+    log,
     ignoreError,
     ignoreSchemaValidation
   });
+
+  const failedUnarchiveMsg = failedUnarchives.length
+    ? `with ${failedUnarchives.length} failed unarchives - check logs for details`
+    : ``;
+
+  log.appendLine(`Unarchived content items ${failedUnarchiveMsg}`);
+
+  await log.close(!silent);
 };
 // log format:
diff --git a/src/common/archive/content-item-unarchive-options.ts b/src/common/archive/content-item-unarchive-options.ts
new file mode 100644
index 00000000..f42f9ef4
--- /dev/null
+++ b/src/common/archive/content-item-unarchive-options.ts
@@ -0,0 +1,15 @@
+import { FileLog } from '../file-log';
+
+export default interface ContentItemUnarchiveOptions {
+  id?: string;
+  schemaId?: string | string[];
+  repoId?: string | string[];
+  folderId?: string | string[];
+  revertLog?: string;
+  facet?: string;
+  logFile: FileLog;
+  force?: boolean;
+  silent?: boolean;
+  ignoreError?: boolean;
+  ignoreSchemaValidation?: boolean;
+}

From 0e9444cee55bf9b3529913ec9e6ec6de80ef7a57 Mon Sep 17 00:00:00 2001
From: Neil Mistry <30687441+neilmistryamplience@users.noreply.github.com>
Date: Wed, 5 Nov 2025 09:45:42 +0000
Subject: [PATCH 40/43] docs: content item sync examples with --destinationHubId

---
 docs/CONTENT-ITEM.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/CONTENT-ITEM.md b/docs/CONTENT-ITEM.md
index a597d88d..020e690a 100644
--- a/docs/CONTENT-ITEM.md
+++ b/docs/CONTENT-ITEM.md
@@ -29,7 +29,7 @@ Return to [README.md](../README.md) for information on other command categories.
   - [tree](#tree)
   - [publish](#publish)
   - [unpublish](#unpublish)
-  - [sync] (#sync)
+  - [sync](#sync)

@@ -492,7 +492,7 @@ dc-cli content-item unpublish --facet "locale:en-GB,label:homepage"
 Sync content items between content hubs. You can sync all items or specify individual content items by ID.

 ```bash
-dc-cli content-item sync [id]
+dc-cli content-item sync [id] --destinationHubId
 ```

 If no `id` is provided, all content items in all content repositories in the specified hub will be synced.
@@ -526,17 +526,17 @@ If no `id` is provided, all content items in all content repositories in the spe ##### Sync a specific content item by ID ```bash -dc-cli content-item sync ba967c23-4c22-4617-a009-0f976d77b81c +dc-cli content-item sync ba967c23-4c22-4617-a009-0f976d77b81c --destinationHubId 1908eb70436d553464f48cb3 ``` ##### Sync all content in a specific repository ```bash -dc-cli content-item sync --repoId 67d1c1cf642fa239dbe15165 +dc-cli content-item sync --repoId 67d1c1cf642fa239dbe15165 --destinationHubId 1908eb70436d553464f48cb3 ``` ##### Use facets to sync filtered content ```bash -dc-cli content-item sync --facet "locale:en-GB,label:homepage" +dc-cli content-item sync --facet "locale:en-GB,label:homepage" --destinationHubId 1908eb70436d553464f48cb3 ``` From 93576c20031533d14060938588abe95c59df04d3 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Wed, 5 Nov 2025 16:38:38 +0000 Subject: [PATCH 41/43] chore: bumping management sdk to latest version --- package-lock.json | 22 ++++++++++++++++++---- package.json | 2 +- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index 7dd51dc1..28147549 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,7 @@ "bottleneck": "2.19.5", "chalk": "2.4.2", "cli-progress": "3.12.0", - "dc-management-sdk-js": "3.1.0", + "dc-management-sdk-js": "3.2.0", "enquirer": "2.3.6", "fs-extra": "10.1.0", "graceful-fs": "4.2.11", @@ -124,6 +124,7 @@ "integrity": "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", @@ -939,6 +940,7 @@ "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "env-paths": "^2.2.1", "import-fresh": "^3.3.0", @@ -3902,6 +3904,7 @@ "integrity": "sha512-LEwC7o1ifqg/6r2gn9Dns0f1rhK+fPFDoMiceTJ6kWmVk6bgXBI/9IOWfVan4WiAavK9pIVWdX0/e3J+eEUh5A==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "undici-types": "~6.19.2" } @@ -4004,6 +4007,7 @@ "integrity": "sha512-Tqoa05bu+t5s8CTZFaGpCH2ub3QeT9YDkXbPd3uQ4SfsLoh1/vv2GEYAioPoxCWJJNsenXlC88tRjwoHNts1oQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.24.1", "@typescript-eslint/types": "8.24.1", @@ -4639,6 +4643,7 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -4806,6 +4811,7 @@ "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", "license": "MIT", + "peer": true, "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", @@ -5015,6 +5021,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "caniuse-lite": "^1.0.30001735", "electron-to-chromium": "^1.5.204", @@ -5584,9 +5591,9 @@ } }, "node_modules/dc-management-sdk-js": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/dc-management-sdk-js/-/dc-management-sdk-js-3.1.0.tgz", - "integrity": "sha512-UP0C0V9b6f2Te8DiJBWmPIOFpSIwjouf7uIGWkbwK0/AF5EghFYeDnF7JYV1z1mpAINntNkjJ+QXIhzuaLo2/A==", + "version": "3.2.0", + "resolved": 
"https://registry.npmjs.org/dc-management-sdk-js/-/dc-management-sdk-js-3.2.0.tgz", + "integrity": "sha512-S8aoObfEYlTtOvoo1Gt7Jt3ezNc4EDoK1t7fMPpk38XGuM8w12hSuKo6QhUpJZrV2agzbsyli2G9a4NgIsuT8g==", "license": "Apache-2.0", "dependencies": { "axios": "1.12.2", @@ -5935,6 +5942,7 @@ "integrity": "sha512-LSehfdpgMeWcTZkWZVIJl+tkZ2nuSkyyB9C27MZqFWXuph7DvaowgcTvKqxvpLW1JZIk8PN7hFY3Rj9LQ7m7lg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", @@ -5996,6 +6004,7 @@ "integrity": "sha512-lZBts941cyJyeaooiKxAtzoPHTN+GbQTJFAIdQbRhA4/8whaAraEh47Whw/ZFfrjNSnlAxqfm9i0XVAEkULjCw==", "dev": true, "license": "MIT", + "peer": true, "bin": { "eslint-config-prettier": "build/bin/cli.js" }, @@ -7989,6 +7998,7 @@ "integrity": "sha512-y2mfcJywuTUkvLm2Lp1/pFX8kTgMO5yyQGq/Sk/n2mN7XWYp4JsCZ/QXW34M8YScgk8bPZlREH04f6blPnoHnQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@jest/core": "30.0.5", "@jest/types": "30.0.5", @@ -13048,6 +13058,7 @@ "integrity": "sha512-hPpFQvHwL3Qv5AdRvBFMhnKo4tYxp0ReXiPn2bxkiohEX6mBeBwEpBSQTkD458RaaDKQMYSp4hX4UtfUTA5wDw==", "dev": true, "license": "MIT", + "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -14116,6 +14127,7 @@ "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -14264,6 +14276,7 @@ "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", "dev": true, + "peer": true, "dependencies": { "@cspotcode/source-map-support": "^0.8.0", "@tsconfig/node10": "^1.0.7", @@ -14369,6 +14382,7 @@ "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" diff --git a/package.json b/package.json index e91aa086..dd4ccf72 100644 --- a/package.json +++ b/package.json @@ -115,7 +115,7 @@ "bottleneck": "2.19.5", "chalk": "2.4.2", "cli-progress": "3.12.0", - "dc-management-sdk-js": "3.1.0", + "dc-management-sdk-js": "3.2.0", "enquirer": "2.3.6", "fs-extra": "10.1.0", "graceful-fs": "4.2.11", From e20728e6300008bb24a2c14a63a164807d27509e Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Thu, 6 Nov 2025 08:44:41 +0000 Subject: [PATCH 42/43] fix: removing job list command --- src/commands/job/list.spec.ts | 62 ----------------------------------- src/commands/job/list.ts | 38 --------------------- 2 files changed, 100 deletions(-) delete mode 100644 src/commands/job/list.spec.ts delete mode 100644 src/commands/job/list.ts diff --git a/src/commands/job/list.spec.ts b/src/commands/job/list.spec.ts deleted file mode 100644 index 4e2b24d9..00000000 --- a/src/commands/job/list.spec.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { command, handler, itemMapFn } from './list'; -import DataPresenter from '../../view/data-presenter'; -import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; -import { Job } from 'dc-management-sdk-js'; -import { DEFAULT_SIZE } from '../../common/dc-management-sdk-js/paginator'; -import MockPage from '../../common/dc-management-sdk-js/mock-page'; - -jest.mock('../../services/dynamic-content-client-factory'); -jest.mock('../../view/data-presenter'); - -describe('job list command', () => { - it('should 
command should defined', function () { - expect(command).toEqual('list'); - }); - - describe('handler', () => { - const yargArgs = { - $0: 'test', - _: ['test'], - json: true - }; - const config = { - clientId: 'client-id', - clientSecret: 'client-id', - hubId: '67d1c1c7642fa239dbe15164' - }; - it('should list jobs', async () => { - const pagingOptions = { sort: 'createdDate,desc' }; - const listResponse = new MockPage(Job, [ - new Job({ id: '68e5289f0aba3024bde00001' }), - new Job({ id: '68e5289f0aba3024bde00002' }) - ]); - const mockListJobs = jest.fn().mockResolvedValue(listResponse); - const mockGetHub = jest.fn().mockResolvedValue({ - related: { - jobs: { - list: mockListJobs - } - } - }); - const mockDataPresenter = DataPresenter as jest.Mock; - - (dynamicContentClientFactory as jest.Mock).mockReturnValue({ - hubs: { - get: mockGetHub - } - }); - - const argv = { ...yargArgs, ...config, ...pagingOptions }; - await handler(argv); - - expect(mockGetHub).toHaveBeenCalledWith('67d1c1c7642fa239dbe15164'); - expect(mockListJobs).toHaveBeenCalledWith({ size: DEFAULT_SIZE, ...pagingOptions }); - - expect(mockDataPresenter).toHaveBeenCalledWith([ - { id: '68e5289f0aba3024bde00001' }, - { id: '68e5289f0aba3024bde00002' } - ]); - expect(mockDataPresenter.mock.instances[0].render).toHaveBeenCalledWith({ itemMapFn, json: argv.json }); - }); - }); -}); diff --git a/src/commands/job/list.ts b/src/commands/job/list.ts deleted file mode 100644 index 55054319..00000000 --- a/src/commands/job/list.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { Arguments } from 'yargs'; -import DataPresenter, { RenderingArguments, RenderingOptions } from '../../view/data-presenter'; -import { ConfigurationParameters } from '../configure'; -import dynamicContentClientFactory from '../../services/dynamic-content-client-factory'; -import { extractSortable, PagingParameters } from '../../common/yargs/sorting-options'; -import paginator from '../../common/dc-management-sdk-js/paginator'; -import { Job } from 'dc-management-sdk-js'; -import { CommandOptions } from '../../interfaces/command-options.interface'; - -export const command = 'list'; - -export const desc = 'List jobs'; - -export const builder: CommandOptions = { - ...RenderingOptions -}; - -export const itemMapFn = ({ id, label, status, jobType, originHubId, destinationHubId }: Job): object => ({ - id, - label, - status, - jobType, - originHubId, - destinationHubId -}); - -export const handler = async ( - argv: Arguments -): Promise => { - const client = dynamicContentClientFactory(argv); - const hub = await client.hubs.get(argv.hubId); - const contentTypeList = await paginator(hub.related.jobs.list, extractSortable(argv)); - - new DataPresenter(contentTypeList.map(value => value.toJSON())).render({ - json: argv.json, - itemMapFn: itemMapFn - }); -}; From 710f53dec618f9f70ae2d8f1c070259472109d78 Mon Sep 17 00:00:00 2001 From: Ben Pearey Date: Fri, 14 Nov 2025 14:05:45 +0000 Subject: [PATCH 43/43] test: increasing timeout interval to avoid race condition --- src/common/burstable-queue/burstable-queue.spec.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/common/burstable-queue/burstable-queue.spec.ts b/src/common/burstable-queue/burstable-queue.spec.ts index ba2bfbe6..6802cdf8 100644 --- a/src/common/burstable-queue/burstable-queue.spec.ts +++ b/src/common/burstable-queue/burstable-queue.spec.ts @@ -4,6 +4,7 @@ import { setTimeout } from 'node:timers/promises'; describe('burstable-queue', () => { it('should schedule task and execute 
them with an initial burst', async () => { const interval = 500; + const timeoutInterval = interval + 10; const burstableQueue = new BurstableQueue({ concurrency: 1, minTime: 0, @@ -23,19 +24,19 @@ describe('burstable-queue', () => { expect(burstableQueue.size()).toEqual(8); expect(completeTasks).toHaveLength(0); - await setTimeout(interval); + await setTimeout(timeoutInterval); expect(burstableQueue.size()).toEqual(4); expect(completeTasks).toHaveLength(4); - await setTimeout(interval); + await setTimeout(timeoutInterval); expect(burstableQueue.size()).toEqual(3); expect(completeTasks).toHaveLength(5); - await setTimeout(interval); + await setTimeout(timeoutInterval); expect(burstableQueue.size()).toEqual(2); expect(completeTasks).toHaveLength(6); - await setTimeout(interval); + await setTimeout(timeoutInterval); expect(burstableQueue.size()).toEqual(1); expect(completeTasks).toHaveLength(7); - await setTimeout(interval); + await setTimeout(timeoutInterval); expect(burstableQueue.size()).toEqual(0); expect(completeTasks).toHaveLength(8); });