diff --git a/.github/workflows/run_tests.yml b/.github/workflows/CI.yml similarity index 75% rename from .github/workflows/run_tests.yml rename to .github/workflows/CI.yml index 0ca708f..34a1b1f 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/CI.yml @@ -1,11 +1,11 @@ -name: Run Tests +name: Continuous Integration on: pull_request: types: [opened, reopened, synchronize] jobs: - run_tests: + Continuous_Integration: runs-on: ubuntu-latest steps: @@ -19,11 +19,14 @@ jobs: with: deno-version: latest + - name: Check formatting + run: | + deno fmt --check + - name: Run Tests run: | deno test \ --allow-read \ --allow-write \ --allow-env \ - --allow-run \ - --trace-leaks + --allow-run diff --git a/README.md b/README.md index 02ab273..c17c8bf 100644 --- a/README.md +++ b/README.md @@ -5,11 +5,12 @@ This project helps decode Salesforce Flows by translating their raw XML definition files into human-understandable UML diagrams. These visualizations clarify the flow's structure and logic, making documentation and code review -significantly easier. It supports generating diagrams using PlantUML, Graphviz, and -Mermaid, and can even highlight changes between different versions of a flow by -processing Git diffs. +significantly easier. It supports generating diagrams using PlantUML, Graphviz, +and Mermaid, and can even highlight changes between different versions of a flow +by processing Git diffs. -Flow Lens is available on JSR, the JavaScript Registry. You can find the package at [jsr.io/@goog/flow-lens](https://jsr.io/@goog/flow-lens). +Flow Lens is available on JSR, the JavaScript Registry. You can find the package +at [jsr.io/@goog/flow-lens](https://jsr.io/@goog/flow-lens). This is not an officially supported Google product. This project is not eligible for the @@ -21,7 +22,8 @@ for the Graphviz, and Mermaid. - **Handles Git diffs:** Can process changes between two Git commits, highlighting added, modified, and deleted elements in the resulting diagram. -- **GitHub Action integration:** Automatically posts flow diagrams as comments on pull requests. +- **GitHub Action integration:** Automatically posts flow diagrams as comments + on pull requests. ## Usage @@ -43,8 +45,11 @@ available: Flow Lens supports two output modes: -1. **json mode (default):** Generates a JSON file containing the UML diagram(s) that can be used for further processing. -2. **github_action mode:** Automatically posts comments with flow diagrams on pull requests when used in a GitHub Actions workflow. When using this mode, you must specify `mermaid` as the diagram tool. +1. **json mode (default):** Generates a JSON file containing the UML diagram(s) + that can be used for further processing. +2. **github_action mode:** Automatically posts comments with flow diagrams on + pull requests when used in a GitHub Actions workflow. When using this mode, + you must specify `mermaid` as the diagram tool. **Example using file path (json mode):** @@ -80,7 +85,8 @@ deno run \ ### Setting up a GitHub Action -You can set up a GitHub Action to automatically generate and post flow diagrams as comments on pull requests. Here's an example workflow configuration: +You can set up a GitHub Action to automatically generate and post flow diagrams +as comments on pull requests. 
Here's an example workflow configuration: ```yaml name: Generate Flow Preview @@ -122,13 +128,16 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} ``` -When using the GitHub Action mode, Flow Lens will automatically post a comment on the pull request with the old (if applicable) and new versions of the flow whenever a pull request is created or updated. This makes it easy to visualize flow changes directly in the pull request review process. +When using the GitHub Action mode, Flow Lens will automatically post a comment +on the pull request with the old (if applicable) and new versions of the flow +whenever a pull request is created or updated. This makes it easy to visualize +flow changes directly in the pull request review process. ## Output -When using the json mode, the output is a JSON file containing the generated UML diagram(s). The structure -will contain the file paths and their associated old (if applicable) and new UML -strings. +When using the json mode, the output is a JSON file containing the generated UML +diagram(s). The structure will contain the file paths and their associated old +(if applicable) and new UML strings. ```json [ diff --git a/deno.json b/deno.json index d37cae4..364655f 100644 --- a/deno.json +++ b/deno.json @@ -1,6 +1,6 @@ { "name": "@goog/flow-lens", - "version": "0.1.8", + "version": "0.1.9", "license": "Apache", "exports": "./src/main/main.ts", "imports": { diff --git a/src/main/argument_processor.ts b/src/main/argument_processor.ts index 04f7443..e8d14ce 100644 --- a/src/main/argument_processor.ts +++ b/src/main/argument_processor.ts @@ -74,9 +74,11 @@ export enum Mode { */ export const ERROR_MESSAGES = { unsupportedDiagramTool: (diagramTool: string) => - `Unsupported diagram tool: ${diagramTool}. Valid options are: ${Object.values( - DiagramTool - ).join(", ")}`, + `Unsupported diagram tool: ${diagramTool}. Valid options are: ${ + Object.values( + DiagramTool, + ).join(", ") + }`, filePathDoesNotExist: (filePath: string) => `filePath does not exist: ${filePath}`, invalidOutputFileName: (outputFileName: string) => @@ -92,9 +94,11 @@ export const ERROR_MESSAGES = { outputFileNameRequired: "outputFileName is required for JSON mode", outputDirectoryRequired: "outputDirectory is required for JSON mode", unsupportedMode: (mode: string) => - `Unsupported mode: ${mode}. Valid options are: ${Object.values(Mode).join( - ", " - )}`, + `Unsupported mode: ${mode}. 
Valid options are: ${ + Object.values(Mode).join( + ", ", + ) + }`, header: "The following errors were encountered:", }; @@ -170,7 +174,7 @@ export class ArgumentProcessor { !Object.values(DiagramTool).includes(lowerCaseDiagramTool as DiagramTool) ) { this.errorsEncountered.push( - ERROR_MESSAGES.unsupportedDiagramTool(this.config.diagramTool) + ERROR_MESSAGES.unsupportedDiagramTool(this.config.diagramTool), ); } } @@ -182,7 +186,7 @@ export class ArgumentProcessor { !Object.values(Mode).includes(lowerCaseMode as Mode) ) { this.errorsEncountered.push( - ERROR_MESSAGES.unsupportedMode(this.config.mode) + ERROR_MESSAGES.unsupportedMode(this.config.mode), ); } } @@ -194,7 +198,7 @@ export class ArgumentProcessor { for (const filePath of this.config.filePath) { if (!fs.existsSync(filePath)) { this.errorsEncountered.push( - ERROR_MESSAGES.filePathDoesNotExist(filePath) + ERROR_MESSAGES.filePathDoesNotExist(filePath), ); } } @@ -208,7 +212,7 @@ export class ArgumentProcessor { const regex = VALID_OUTPUT_FILE_NAME_REGEX; if (!regex.test(this.config.outputFileName)) { this.errorsEncountered.push( - ERROR_MESSAGES.invalidOutputFileName(this.config.outputFileName) + ERROR_MESSAGES.invalidOutputFileName(this.config.outputFileName), ); } } @@ -220,7 +224,7 @@ export class ArgumentProcessor { } if (!fs.existsSync(this.config.outputDirectory)) { this.errorsEncountered.push( - ERROR_MESSAGES.invalidOutputDirectory(this.config.outputDirectory) + ERROR_MESSAGES.invalidOutputDirectory(this.config.outputDirectory), ); } } @@ -231,7 +235,7 @@ export class ArgumentProcessor { !(this.config.gitDiffFromHash && this.config.gitDiffToHash) ) { this.errorsEncountered.push( - ERROR_MESSAGES.filePathOrGitDiffFromAndToHashRequired + ERROR_MESSAGES.filePathOrGitDiffFromAndToHashRequired, ); } } @@ -243,7 +247,7 @@ export class ArgumentProcessor { (this.config.gitDiffFromHash || this.config.gitDiffToHash) ) { this.errorsEncountered.push( - ERROR_MESSAGES.filePathAndGitDiffFromAndToHashMutuallyExclusive + ERROR_MESSAGES.filePathAndGitDiffFromAndToHashMutuallyExclusive, ); } } @@ -254,7 +258,7 @@ export class ArgumentProcessor { (this.config.gitDiffFromHash && !this.config.gitDiffToHash) ) { this.errorsEncountered.push( - ERROR_MESSAGES.gitDiffFromAndToHashMustBeSpecifiedTogether + ERROR_MESSAGES.gitDiffFromAndToHashMustBeSpecifiedTogether, ); } } @@ -262,7 +266,7 @@ export class ArgumentProcessor { private checkForErrors() { if (this.errorsEncountered.length > 0) { const errors: string[] = this.errorsEncountered.map( - (error) => `- ${error}` + (error) => `- ${error}`, ); errors.unshift(ERROR_MESSAGES.header); throw new Error(errors.join("\n")); diff --git a/src/main/flow_file_change_detector.ts b/src/main/flow_file_change_detector.ts index a6bfe4d..b5b1615 100644 --- a/src/main/flow_file_change_detector.ts +++ b/src/main/flow_file_change_detector.ts @@ -43,7 +43,7 @@ const GIT_COMMANDS = { getFileContent: ( filePath: string, commitHash: string, - repo: string | undefined + repo: string | undefined, ) => `git ${repo ? `-C ${repo}` : ""} show ${commitHash}:${filePath}`, }; @@ -82,11 +82,11 @@ export class FlowFileChangeDetector { fromOrTo === "old" ? 
(Configuration.getInstance().gitDiffFromHash as string) : (Configuration.getInstance().gitDiffToHash as string), - Configuration.getInstance().gitRepo + Configuration.getInstance().gitRepo, ); } catch (error: unknown) { throw new Error( - ERROR_MESSAGES.unableToGetFileContent(filePath, error as Error) + ERROR_MESSAGES.unableToGetFileContent(filePath, error as Error), ); } return fileContent.toString(); @@ -131,15 +131,15 @@ export class FlowFileChangeDetector { GIT_COMMANDS.diff( Configuration.getInstance().gitDiffFromHash!, Configuration.getInstance().gitDiffToHash!, - Configuration.getInstance().gitRepo - ) + Configuration.getInstance().gitRepo, + ), ); } private executeGetFileContentCommand( filePath: string, commitHash: string, - repo: string | undefined + repo: string | undefined, ) { return execSync(GIT_COMMANDS.getFileContent(filePath, commitHash, repo)); } @@ -149,7 +149,7 @@ export class FlowFileChangeDetector { .split(EOL) .filter( (filePath) => - filePath && filePath.toLowerCase().endsWith(FLOW_FILE_EXTENSION) + filePath && filePath.toLowerCase().endsWith(FLOW_FILE_EXTENSION), ); } } diff --git a/src/main/flow_parser.ts b/src/main/flow_parser.ts index 275a3af..8ba750f 100644 --- a/src/main/flow_parser.ts +++ b/src/main/flow_parser.ts @@ -117,7 +117,7 @@ export class FlowParser { } else { resolve(result); } - } + }, ); }); } @@ -137,7 +137,7 @@ export class FlowParser { this.beingParsed.apexPluginCalls = ensureArray(flow.apexPluginCalls); this.beingParsed.assignments = ensureArray(flow.assignments); this.beingParsed.collectionProcessors = ensureArray( - flow.collectionProcessors + flow.collectionProcessors, ); this.beingParsed.customErrors = ensureArray(flow.customErrors); setCustomErrorMessages(this.beingParsed.customErrors); @@ -291,14 +291,14 @@ export class FlowParser { from: flowTypes.FlowNode, connection: flowTypes.FlowConnector, isFault: boolean, - transitionLabel?: string + transitionLabel?: string, ): Transition { const connectedNode = this.beingParsed.nameToNode?.get( - connection.targetReference + connection.targetReference, ); if (!connectedNode) { throw new Error( - ERROR_MESSAGES.couldNotFindConnectedNode(connection.targetReference) + ERROR_MESSAGES.couldNotFindConnectedNode(connection.targetReference), ); } return { @@ -310,7 +310,7 @@ export class FlowParser { } private getTransitionsFromDecision( - node: flowTypes.FlowDecision + node: flowTypes.FlowDecision, ): Transition[] { const result: Transition[] = []; if (node.defaultConnector) { @@ -319,15 +319,15 @@ export class FlowParser { node, node.defaultConnector, false, - node.defaultConnectorLabel - ) + node.defaultConnectorLabel, + ), ); } if (node.rules) { for (const rule of node.rules) { if (rule && rule.connector) { result.push( - this.createTransition(node, rule.connector, false, rule.label) + this.createTransition(node, rule.connector, false, rule.label), ); } } @@ -352,17 +352,17 @@ export class FlowParser { | flowTypes.FlowRecordLookup | flowTypes.FlowRecordUpdate | flowTypes.FlowApexPluginCall - | flowTypes.FlowActionCall + | flowTypes.FlowActionCall, ): Transition[] { const result: Transition[] = []; if (node.connector) { result.push( - this.createTransition(node, node.connector, false, undefined) + this.createTransition(node, node.connector, false, undefined), ); } if (node.faultConnector) { result.push( - this.createTransition(node, node.faultConnector, true, FAULT) + this.createTransition(node, node.faultConnector, true, FAULT), ); } return result; @@ -372,7 +372,7 @@ export class FlowParser { const 
result: Transition[] = []; if (node.nextValueConnector) { result.push( - this.createTransition(node, node.nextValueConnector, false, "for each") + this.createTransition(node, node.nextValueConnector, false, "for each"), ); } if (node.noMoreValuesConnector) { @@ -381,8 +381,8 @@ export class FlowParser { node, node.noMoreValuesConnector, false, - "after all" - ) + "after all", + ), ); } return result; @@ -396,13 +396,15 @@ export class FlowParser { | flowTypes.FlowSubflow | flowTypes.FlowRecordRollback | flowTypes.FlowTransform - | flowTypes.FlowCustomError + | flowTypes.FlowCustomError, ): Transition[] { const result: Transition[] = []; if (node.connector) { - for (const connector of Array.isArray(node.connector) - ? node.connector - : [node.connector]) { + for ( + const connector of Array.isArray(node.connector) + ? node.connector + : [node.connector] + ) { result.push(this.createTransition(node, connector, false, undefined)); } } @@ -417,13 +419,13 @@ export class FlowParser { node, node.defaultConnector, false, - node.defaultConnectorLabel - ) + node.defaultConnectorLabel, + ), ); } if (node.faultConnector) { result.push( - this.createTransition(node, node.faultConnector, true, FAULT) + this.createTransition(node, node.faultConnector, true, FAULT), ); } return result; @@ -448,14 +450,14 @@ function ensureArray(input: T[] | undefined): T[] | undefined { * array. */ function setOrchestratedStageSteps( - orchestratedStages: flowTypes.FlowOrchestratedStage[] | undefined + orchestratedStages: flowTypes.FlowOrchestratedStage[] | undefined, ) { orchestratedStages?.forEach((stage) => { if (!stage.stageSteps) { return; } stage.stageSteps = ensureArray( - stage.stageSteps + stage.stageSteps, ) as flowTypes.FlowStageStep[]; }); } @@ -498,17 +500,17 @@ function setRuleConditions(rules: flowTypes.FlowRule[] | undefined) { * need to be converted to an array. */ function setRecordLookups( - recordLookups: flowTypes.FlowRecordLookup[] | undefined + recordLookups: flowTypes.FlowRecordLookup[] | undefined, ) { recordLookups?.forEach((recordLookup) => { if (recordLookup.filters) { recordLookup.filters = ensureArray( - recordLookup.filters + recordLookup.filters, ) as flowTypes.FlowRecordFilter[]; } if (recordLookup.queriedFields) { recordLookup.queriedFields = ensureArray( - recordLookup.queriedFields + recordLookup.queriedFields, ) as string[]; } }); @@ -518,18 +520,18 @@ function setRecordLookups( * Ensures that filters and inputAssignments are arrays in record update nodes */ function setRecordUpdates( - recordUpdates: flowTypes.FlowRecordUpdate[] | undefined + recordUpdates: flowTypes.FlowRecordUpdate[] | undefined, ) { if (!recordUpdates) { return; } for (const recordUpdate of recordUpdates) { recordUpdate.filters = ensureArray( - recordUpdate.filters ?? [] + recordUpdate.filters ?? [], ) as flowTypes.FlowRecordFilter[]; recordUpdate.inputAssignments = ensureArray( - recordUpdate.inputAssignments ?? [] + recordUpdate.inputAssignments ?? [], ) as flowTypes.FlowInputFieldAssignment[]; } } @@ -541,14 +543,14 @@ function setRecordUpdates( * an array. */ function setCustomErrorMessages( - customErrors: flowTypes.FlowCustomError[] | undefined + customErrors: flowTypes.FlowCustomError[] | undefined, ) { if (!customErrors) { return; } for (const customError of customErrors) { customError.customErrorMessages = ensureArray( - customError.customErrorMessages + customError.customErrorMessages, ) as flowTypes.FlowCustomErrorMessage[]; } } @@ -558,17 +560,17 @@ function setCustomErrorMessages( * of node. 
*/ function isAssignment( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowAssignment { return (node as flowTypes.FlowAssignment).assignmentItems !== undefined; } function isCollectionProcessor( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowCollectionProcessor { return ( (node as flowTypes.FlowCollectionProcessor).collectionProcessorType !== - undefined + undefined ); } @@ -581,7 +583,7 @@ function isSubflow(node: flowTypes.FlowNode): node is flowTypes.FlowSubflow { } function isTransform( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowTransform { return (node as flowTypes.FlowTransform).dataType !== undefined; } @@ -603,49 +605,49 @@ function isLoop(node: flowTypes.FlowNode): node is flowTypes.FlowLoop { } function isRecordCreate( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowRecordCreate { return (node as flowTypes.FlowRecordCreate).inputReference !== undefined; } function isRecordDelete( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowRecordDelete { return (node as flowTypes.FlowRecordDelete).inputReference !== undefined; } function isRecordLookup( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowRecordLookup { return (node as flowTypes.FlowRecordLookup).filters !== undefined; } function isRecordUpdate( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowRecordUpdate { return (node as flowTypes.FlowRecordUpdate).inputReference !== undefined; } function isRecordRollback( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowRecordRollback { return (node as flowTypes.FlowRecordRollback).connector !== undefined; } function isApexPluginCall( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowApexPluginCall { return (node as flowTypes.FlowApexPluginCall).apexClass !== undefined; } function isFlowActionCall( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowActionCall { return (node as flowTypes.FlowActionCall).actionName !== undefined; } function isCustomError( - node: flowTypes.FlowNode + node: flowTypes.FlowNode, ): node is flowTypes.FlowCustomError { return (node as flowTypes.FlowCustomError).customErrorMessages !== undefined; } diff --git a/src/main/flow_to_uml_transformer.ts b/src/main/flow_to_uml_transformer.ts index 1c3c521..d52013a 100644 --- a/src/main/flow_to_uml_transformer.ts +++ b/src/main/flow_to_uml_transformer.ts @@ -59,7 +59,7 @@ export class FlowToUmlTransformer { constructor( private readonly filePaths: string[], private readonly generatorContext: UmlGeneratorContext, - private readonly changeDetector: FlowFileChangeDetector + private readonly changeDetector: FlowFileChangeDetector, ) {} async transformToUmlDiagrams(): Promise> { @@ -75,7 +75,7 @@ export class FlowToUmlTransformer { } private async transformToUmlDiagram( - filePath: string + filePath: string, ): Promise { return new Promise(async (resolve, reject) => { try { diff --git a/src/main/flow_types.ts b/src/main/flow_types.ts index c1b7210..c202ff9 100644 --- a/src/main/flow_types.ts +++ b/src/main/flow_types.ts @@ -40,7 +40,8 @@ export enum FlowAssignmentOperator { export enum FlowCollectionProcessorType { SORT_COLLECTION_PROCESSOR = "SortCollectionProcessor", - RECOMMENDATION_MAP_COLLECTION_PROCESSOR = "RecommendationMapCollectionProcessor", + RECOMMENDATION_MAP_COLLECTION_PROCESSOR = + 
"RecommendationMapCollectionProcessor", FILTER_COLLECTION_PROCESSOR = "FilterCollectionProcessor", } diff --git a/src/main/github_client.ts b/src/main/github_client.ts index 86aa412..2a21d14 100644 --- a/src/main/github_client.ts +++ b/src/main/github_client.ts @@ -111,7 +111,8 @@ export class GithubClient { throw new Error(ERROR_MESSAGES.MISSING_PR_NUMBER); } - const endpoint = `POST /repos/${this.context.repo.owner}/${this.context.repo.repo}/pulls/${pullRequestNumber}/comments`; + const endpoint = + `POST /repos/${this.context.repo.owner}/${this.context.repo.repo}/pulls/${pullRequestNumber}/comments`; await this.octokit.request(endpoint, comment); } @@ -158,14 +159,14 @@ export class GithubClient { try { const response = await this.octokit.request( - `GET /repos/${owner}/${repo}/pulls/${prNumber}/comments` + `GET /repos/${owner}/${repo}/pulls/${prNumber}/comments`, ); return response.data; } catch (error) { throw new Error( ERROR_MESSAGES.FETCH_COMMENTS_FAILED( - error instanceof Error ? error.message : "Unknown error" - ) + error instanceof Error ? error.message : "Unknown error", + ), ); } } @@ -186,14 +187,14 @@ export class GithubClient { try { await this.octokit.request( - `DELETE /repos/${owner}/${repo}/pulls/comments/${commentId}` + `DELETE /repos/${owner}/${repo}/pulls/comments/${commentId}`, ); } catch (error) { throw new Error( ERROR_MESSAGES.DELETE_COMMENT_FAILED( commentId, - error instanceof Error ? error.message : "Unknown error" - ) + error instanceof Error ? error.message : "Unknown error", + ), ); } } diff --git a/src/main/graphviz_generator.ts b/src/main/graphviz_generator.ts index f692c48..3601441 100644 --- a/src/main/graphviz_generator.ts +++ b/src/main/graphviz_generator.ts @@ -21,11 +21,11 @@ import type { Transition } from "./flow_parser.ts"; import * as flowTypes from "./flow_types.ts"; import { - UmlGenerator, type DiagramNode, + Icon as UmlIcon, type InnerNode, SkinColor as UmlSkinColor, - Icon as UmlIcon, + UmlGenerator, } from "./uml_generator.ts"; const EOL = "\n"; @@ -125,8 +125,8 @@ node [shape=box, style="filled, rounded"]`; } toUmlString(node: DiagramNode): string { - const graphvizSkinColor = - GraphVizGenerator.SKIN_COLOR_MAP[node.color] || SkinColor.NONE; + const graphvizSkinColor = GraphVizGenerator.SKIN_COLOR_MAP[node.color] || + SkinColor.NONE; const graphvizIcon = GraphVizGenerator.ICON_MAP[node.icon] || Icon.NONE; return getNodeBody( @@ -134,14 +134,14 @@ node [shape=box, style="filled, rounded"]`; node.type, graphvizIcon, graphvizSkinColor, - this.generateInnerNodeBodyFromInnerNodes(node, node.innerNodes) + this.generateInnerNodeBodyFromInnerNodes(node, node.innerNodes), ); } // Helper method to generate inner node body from DiagramNode's innerNodes generateInnerNodeBodyFromInnerNodes( parentNode: DiagramNode, - innerNodes?: InnerNode[] + innerNodes?: InnerNode[], ): string { if (!innerNodes || innerNodes.length === 0) { return ""; @@ -155,7 +155,7 @@ node [shape=box, style="filled, rounded"]`; FontColor.WHITE, getLabel(innerNode.type), getLabel(innerNode.label), - innerNode.content + innerNode.content, ); result.push(innerNodeBody); } @@ -181,13 +181,14 @@ function getNodeBody( type: string, icon: Icon, skinColor: SkinColor, - innerNodeBody?: string + innerNodeBody?: string, ): string { const formattedInnerNodeBody = innerNodeBody ? `${EOL}${innerNodeBody}${EOL}` : ""; - const fontColor = - skinColor === SkinColor.NONE ? FontColor.BLACK : FontColor.WHITE; + const fontColor = skinColor === SkinColor.NONE + ? 
FontColor.BLACK + : FontColor.WHITE; const diffStyle = node.diffStatus ? DIFF_STATUS_TO_STYLE[node.diffStatus] : ""; @@ -207,13 +208,15 @@ function getInnerNodeBody( color: FontColor, type: string, label: string, - content: string[] + content: string[], ) { const formattedLabel = getLabel([type, label].join(" ")); return ` - + ${formattedLabel} ${content.map((content) => `
${content}`).join("")} diff --git a/src/main/main.ts b/src/main/main.ts index 4f7482e..ed57599 100644 --- a/src/main/main.ts +++ b/src/main/main.ts @@ -46,12 +46,12 @@ export class Runner { } private async generateUml() { const generatorContext = new UmlGeneratorContext( - Configuration.getInstance().diagramTool + Configuration.getInstance().diagramTool, ); const transformer = new FlowToUmlTransformer( this.flowFilePaths, generatorContext, - this.changeDetector + this.changeDetector, ); this.filePathToFlowDifference = await transformer.transformToUmlDiagrams(); } diff --git a/src/main/mermaid_generator.ts b/src/main/mermaid_generator.ts index 53fba28..953c97c 100644 --- a/src/main/mermaid_generator.ts +++ b/src/main/mermaid_generator.ts @@ -163,7 +163,8 @@ export class MermaidGenerator extends UmlGenerator { const symbol = MermaidGenerator.DIFF_STATUS_TO_SYMBOL[node.diffStatus]; const color = MermaidGenerator.DIFF_STATUS_TO_COLOR[node.diffStatus]; // Add more horizontal padding around the diff indicator - diffStatus = `${symbol}`; + diffStatus = + `${symbol}`; } const sanitizedLabel = this.sanitizeLabel(node.label); diff --git a/src/main/plantuml_generator.ts b/src/main/plantuml_generator.ts index 7e031b2..5f323f5 100644 --- a/src/main/plantuml_generator.ts +++ b/src/main/plantuml_generator.ts @@ -21,9 +21,9 @@ import type { Transition } from "./flow_parser.ts"; import * as flowTypes from "./flow_types.ts"; import { - UmlGenerator, type DiagramNode, Icon as UmlIcon, + UmlGenerator, } from "./uml_generator.ts"; const EOL = "\n"; @@ -148,7 +148,7 @@ title ${label}`; plantUmlIcon, plantUmlSkinColor, diffIcon, - diffStyleClass + diffStyleClass, ); // Handle inner nodes if they exist @@ -196,11 +196,13 @@ function generateNode( icon: Icon, skinColor: SkinColor, diffIcon: DiffIcon = DiffIcon.NONE, - diffStyleClass: string = "" + diffStyleClass: string = "", ): string { - return `state "${diffIcon}**${type}**${icon} \\n ${getLabel( - label - )}" as ${name}${skinColor}${diffStyleClass}`; + return `state "${diffIcon}**${type}**${icon} \\n ${ + getLabel( + label, + ) + }" as ${name}${skinColor}${diffStyleClass}`; } function getLabel(label: string) { diff --git a/src/main/uml_generator.ts b/src/main/uml_generator.ts index 7472674..04836a2 100644 --- a/src/main/uml_generator.ts +++ b/src/main/uml_generator.ts @@ -94,75 +94,75 @@ export abstract class UmlGenerator { this.getHeader(this.parsedFlow.label!), this.processFlowElements( this.parsedFlow.apexPluginCalls, - (node) => this.getFlowApexPluginCall(node) + (node) => this.getFlowApexPluginCall(node), ), this.processFlowElements( this.parsedFlow.assignments, - (node) => this.getFlowAssignment(node) + (node) => this.getFlowAssignment(node), ), this.processFlowElements( this.parsedFlow.collectionProcessors, - (node) => this.getFlowCollectionProcessor(node) + (node) => this.getFlowCollectionProcessor(node), ), this.processFlowElements( this.parsedFlow.decisions, - (node) => this.getFlowDecision(node) + (node) => this.getFlowDecision(node), ), this.processFlowElements( this.parsedFlow.loops, - (node) => this.getFlowLoop(node) + (node) => this.getFlowLoop(node), ), this.processFlowElements( this.parsedFlow.orchestratedStages, - (node) => this.getFlowOrchestratedStage(node) + (node) => this.getFlowOrchestratedStage(node), ), this.processFlowElements( this.parsedFlow.recordCreates, - (node) => this.getFlowRecordCreate(node) + (node) => this.getFlowRecordCreate(node), ), this.processFlowElements( this.parsedFlow.recordDeletes, - (node) => 
this.getFlowRecordDelete(node) + (node) => this.getFlowRecordDelete(node), ), this.processFlowElements( this.parsedFlow.recordLookups, - (node) => this.getFlowRecordLookup(node) + (node) => this.getFlowRecordLookup(node), ), this.processFlowElements( this.parsedFlow.recordRollbacks, - (node) => this.getFlowRecordRollback(node) + (node) => this.getFlowRecordRollback(node), ), this.processFlowElements( this.parsedFlow.recordUpdates, - (node) => this.getFlowRecordUpdate(node) + (node) => this.getFlowRecordUpdate(node), ), this.processFlowElements( this.parsedFlow.screens, - (node) => this.getFlowScreen(node) + (node) => this.getFlowScreen(node), ), this.processFlowElements( this.parsedFlow.steps, - (node) => this.getFlowStep(node) + (node) => this.getFlowStep(node), ), this.processFlowElements( this.parsedFlow.subflows, - (node) => this.getFlowSubflow(node) + (node) => this.getFlowSubflow(node), ), this.processFlowElements( this.parsedFlow.transforms, - (node) => this.getFlowTransform(node) + (node) => this.getFlowTransform(node), ), this.processFlowElements( this.parsedFlow.waits, - (node) => this.getFlowWait(node) + (node) => this.getFlowWait(node), ), this.processFlowElements( this.parsedFlow.actionCalls, - (node) => this.getFlowActionCall(node) + (node) => this.getFlowActionCall(node), ), this.processFlowElements( this.parsedFlow.customErrors, - (node) => this.getFlowCustomError(node) + (node) => this.getFlowCustomError(node), ), this.processTransitions(this.parsedFlow.transitions), this.getFooter(), @@ -198,7 +198,7 @@ export abstract class UmlGenerator { } private getFlowCollectionProcessor( - node: flowTypes.FlowCollectionProcessor + node: flowTypes.FlowCollectionProcessor, ): string { return this.toUmlString({ id: node.name, @@ -231,9 +231,9 @@ export abstract class UmlGenerator { let conditionCounter = 1; const conditions = rule.conditions.map( (condition) => - `${conditionCounter++}. ${condition.leftValueReference} ${ - condition.operator - } ${toString(condition.rightValue)}` + `${conditionCounter++}. ${condition.leftValueReference} ${condition.operator} ${ + toString(condition.rightValue) + }`, ); if (conditions.length > 1) { const logicLabel = `Logic: ${rule.conditionLogic}`; @@ -261,7 +261,7 @@ export abstract class UmlGenerator { } private getFlowOrchestratedStage( - node: flowTypes.FlowOrchestratedStage + node: flowTypes.FlowOrchestratedStage, ): string { return this.toUmlString({ id: node.name, @@ -275,7 +275,7 @@ export abstract class UmlGenerator { } private getFlowOrchestratedStageInnerNodes( - node: flowTypes.FlowOrchestratedStage + node: flowTypes.FlowOrchestratedStage, ): InnerNode[] { let counter = 1; const result: InnerNode[] = []; @@ -328,7 +328,7 @@ export abstract class UmlGenerator { } private getFlowRecordLookupInnerNodes( - node: flowTypes.FlowRecordLookup + node: flowTypes.FlowRecordLookup, ): InnerNode[] { const innerNodeContent: string[] = []; innerNodeContent.push(...this.getFieldsQueried(node)); @@ -363,9 +363,11 @@ export abstract class UmlGenerator { `Filter Logic: ${node.filterLogic ? node.filterLogic : "None"}`, ]; const filters = node.filters?.map((filter, index) => { - return `${index + 1}. ${filter.field} ${filter.operator} ${toString( - filter.value - )}`; + return `${index + 1}. 
${filter.field} ${filter.operator} ${ + toString( + filter.value, + ) + }`; }); if (filters) { result.push(...filters); @@ -407,7 +409,7 @@ export abstract class UmlGenerator { } private getFlowRecordUpdateInnerNodes( - node: flowTypes.FlowRecordUpdate + node: flowTypes.FlowRecordUpdate, ): InnerNode[] { const innerNodeContent: string[] = []; @@ -415,9 +417,11 @@ export abstract class UmlGenerator { innerNodeContent.push("Filter Criteria:"); node.filters.forEach((filter, index) => { innerNodeContent.push( - `${index + 1}. ${filter.field} ${filter.operator} ${toString( - filter.value - )}` + `${index + 1}. ${filter.field} ${filter.operator} ${ + toString( + filter.value, + ) + }`, ); }); } @@ -426,7 +430,7 @@ export abstract class UmlGenerator { innerNodeContent.push("Field Updates:"); node.inputAssignments.forEach((assignment) => { innerNodeContent.push( - `${assignment.field} = ${toString(assignment.value)}` + `${assignment.field} = ${toString(assignment.value)}`, ); }); } @@ -525,7 +529,7 @@ export abstract class UmlGenerator { } private getFlowCustomErrorInnerNodes( - node: flowTypes.FlowCustomError + node: flowTypes.FlowCustomError, ): InnerNode[] { const innerNodeContent: string[] = []; @@ -535,7 +539,7 @@ export abstract class UmlGenerator { ? ` (Field: ${message.fieldSelection})` : ""; innerNodeContent.push( - `${index + 1}. ${message.errorMessage}${fieldInfo}` + `${index + 1}. ${message.errorMessage}${fieldInfo}`, ); }); } @@ -552,7 +556,7 @@ export abstract class UmlGenerator { private processFlowElements( elements: T[] | undefined, - elementProcessor: (element: T) => string + elementProcessor: (element: T) => string, ): string { return elements?.map(elementProcessor).join(EOL) ?? ""; } @@ -568,7 +572,7 @@ export abstract class UmlGenerator { function toString(element: flowTypes.FlowElementReferenceOrValue | undefined) { if (!element) { - return ''; + return ""; } if ( element.apexValue || @@ -582,13 +586,13 @@ function toString(element: flowTypes.FlowElementReferenceOrValue | undefined) { ) { return ( element.stringValue ?? - element.sobjectValue ?? - element.apexValue ?? - element.elementReference ?? - element.formulaExpression ?? - element.setupReference ?? - element.transformValueReference ?? - element.formulaDataType + element.sobjectValue ?? + element.apexValue ?? + element.elementReference ?? + element.formulaExpression ?? + element.setupReference ?? + element.transformValueReference ?? 
+ element.formulaDataType ); } if (element.dateTimeValue) { diff --git a/src/main/uml_writer.ts b/src/main/uml_writer.ts index de5f7b7..459a2fd 100644 --- a/src/main/uml_writer.ts +++ b/src/main/uml_writer.ts @@ -40,8 +40,8 @@ export class UmlWriter { constructor( private readonly filePathToFlowDifference: Map, private readonly githubClient = new GithubClient( - Deno.env.get("GITHUB_TOKEN") || "" - ) + Deno.env.get("GITHUB_TOKEN") || "", + ), ) {} /** @@ -61,16 +61,16 @@ export class UmlWriter { fs.writeFileSync( path.join( config.outputDirectory!, - `${config.outputFileName!}${FILE_EXTENSION}` + `${config.outputFileName!}${FILE_EXTENSION}`, ), - JSON.stringify(fileBody, null, 2) + JSON.stringify(fileBody, null, 2), ); } private async writeGithubComment(config: RuntimeConfig) { try { - const existingComments = - await this.githubClient.getAllCommentsForPullRequest(); + const existingComments = await this.githubClient + .getAllCommentsForPullRequest(); const flowLensComments = existingComments.filter((comment) => comment.body.includes(HIDDEN_COMMENT_PREFIX) @@ -83,7 +83,7 @@ export class UmlWriter { for (const [filePath, flowDifference] of this.filePathToFlowDifference) { const comment = this.githubClient.translateToComment( getBody(flowDifference), - filePath + filePath, ); await this.githubClient.writeComment(comment); } @@ -101,7 +101,7 @@ interface DefaultFormat { interface Formatter { format( - filePathToFlowDifference: Map + filePathToFlowDifference: Map, ): DefaultFormat[]; } @@ -111,7 +111,7 @@ class DefaultFormatter implements Formatter { * @param filePathToFlowDifference A map of file paths to UML diagrams. */ format( - filePathToFlowDifference: Map + filePathToFlowDifference: Map, ): DefaultFormat[] { const result: DefaultFormat[] = []; for (const [filePath, flowDifference] of filePathToFlowDifference) { diff --git a/src/test/argument_processor_test.ts b/src/test/argument_processor_test.ts index ba67626..eefc042 100644 --- a/src/test/argument_processor_test.ts +++ b/src/test/argument_processor_test.ts @@ -22,21 +22,7 @@ import { RuntimeConfig, } from "../main/argument_processor.ts"; import { assertEquals, assertThrows } from "@std/assert"; - -/** - * The test configuration object that is used by the ArgumentProcessor tests. 
- */ -export function getTestConfig(): RuntimeConfig { - return { - diagramTool: DiagramTool.PLANTUML, - filePath: [], - gitDiffFromHash: "HEAD~", - gitDiffToHash: "HEAD", - outputDirectory: "/", - outputFileName: "test", - mode: Mode.JSON, - }; -} +import { getTestConfig } from "./test_utils.ts"; const INVALID_DIAGRAM_TOOL = "unsupported"; const INVALID_FILE_PATH = "invalid/file/path/which/does/not/exist"; @@ -45,7 +31,7 @@ const INVALID_OUTPUT_DIRECTORY = "invalid/directory/path"; const INVALID_MODE = "unsupported"; function setupTest( - configModifications: (config: RuntimeConfig) => void = () => {} + configModifications: (config: RuntimeConfig) => void = () => {}, ) { let testConfiguration = getTestConfig(); configModifications(testConfiguration); @@ -55,237 +41,238 @@ function setupTest( }; } -Deno.test( - "ArgumentProcessor should validate when it has the proper configuration", - () => { +Deno.test("ArgumentProcessor", async (t) => { + await t.step("should validate when it has the proper configuration", () => { const { argumentProcessor, config } = setupTest(); const result = argumentProcessor.getConfig(); assertEquals(result, config); - } -); + }); -Deno.test( - "ArgumentProcessor should throw an exception when the diagram tool is not supported", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.diagramTool = INVALID_DIAGRAM_TOOL as DiagramTool) - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.unsupportedDiagramTool(INVALID_DIAGRAM_TOOL) - ); - } -); + await t.step( + "should throw an exception when the diagram tool is not supported", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + ( + config, + ) => (config.diagramTool = INVALID_DIAGRAM_TOOL as DiagramTool), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.unsupportedDiagramTool(INVALID_DIAGRAM_TOOL), + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the mode is not supported", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.mode = INVALID_MODE as Mode) - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.unsupportedMode(INVALID_MODE) - ); - } -); + await t.step( + "should throw an exception when the mode is not supported", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.mode = INVALID_MODE as Mode), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.unsupportedMode(INVALID_MODE), + ); + }, + ); -Deno.test( - "ArgumentProcessor should validate when mode is GITHUB_ACTION and outputDirectory/outputFileName are not provided", - () => { - const { argumentProcessor, config } = setupTest((config) => { - config.mode = Mode.GITHUB_ACTION; - config.outputDirectory = undefined; - config.outputFileName = undefined; - }); - const result = argumentProcessor.getConfig(); - assertEquals(result, config); - } -); + await t.step( + "should validate when mode is GITHUB_ACTION and outputDirectory/outputFileName are not provided", + () => { + const { argumentProcessor, config } = setupTest((config) => { + config.mode = Mode.GITHUB_ACTION; + config.outputDirectory = undefined; + config.outputFileName = undefined; + }); + const result = argumentProcessor.getConfig(); + assertEquals(result, config); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when outputDirectory is not provided in JSON mode", - () => { - assertThrows( - () => { - const { 
argumentProcessor } = setupTest((config) => { - config.mode = Mode.JSON; - config.outputDirectory = undefined; - }); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.outputDirectoryRequired - ); - } -); + await t.step( + "should throw an exception when outputDirectory is not provided in JSON mode", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest((config) => { + config.mode = Mode.JSON; + config.outputDirectory = undefined; + }); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.outputDirectoryRequired, + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when outputFileName is not provided in JSON mode", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest((config) => { - config.mode = Mode.JSON; - config.outputFileName = undefined; - }); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.outputFileNameRequired - ); - } -); + await t.step( + "should throw an exception when outputFileName is not provided in JSON mode", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest((config) => { + config.mode = Mode.JSON; + config.outputFileName = undefined; + }); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.outputFileNameRequired, + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the file path is not valid", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest((config) => { - config.filePath = [INVALID_FILE_PATH]; - config.gitDiffFromHash = undefined; - config.gitDiffToHash = undefined; - }); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.filePathDoesNotExist(INVALID_FILE_PATH) - ); - } -); + await t.step( + "should throw an exception when the file path is not valid", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest((config) => { + config.filePath = [INVALID_FILE_PATH]; + config.gitDiffFromHash = undefined; + config.gitDiffToHash = undefined; + }); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.filePathDoesNotExist(INVALID_FILE_PATH), + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the output file name is not populated in JSON mode", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.outputFileName = "") - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.outputFileNameRequired - ); - } -); + await t.step( + "should throw an exception when the output file name is not populated in JSON mode", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.outputFileName = ""), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.outputFileNameRequired, + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the output file name is not supported in JSON mode", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.outputFileName = INVALID_OUTPUT_FILE_NAME) - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.invalidOutputFileName(INVALID_OUTPUT_FILE_NAME) - ); - } -); + await t.step( + "should throw an exception when the output file name is not supported in JSON mode", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.outputFileName = INVALID_OUTPUT_FILE_NAME), + ); + argumentProcessor.getConfig(); + }, + Error, + 
ERROR_MESSAGES.invalidOutputFileName(INVALID_OUTPUT_FILE_NAME), + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the output directory is not valid in JSON mode", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.outputDirectory = INVALID_OUTPUT_DIRECTORY) - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.invalidOutputDirectory(INVALID_OUTPUT_DIRECTORY) - ); - } -); + await t.step( + "should throw an exception when the output directory is not valid in JSON mode", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.outputDirectory = INVALID_OUTPUT_DIRECTORY), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.invalidOutputDirectory(INVALID_OUTPUT_DIRECTORY), + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the output directory is not specified in JSON mode", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.outputDirectory = "") - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.outputDirectoryRequired - ); - } -); + await t.step( + "should throw an exception when the output directory is not specified in JSON mode", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.outputDirectory = ""), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.outputDirectoryRequired, + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when either the filePath or (gitDiffFromHash and gitDiffToHash) are not specified", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest((config) => { - config.gitDiffToHash = undefined; - config.gitDiffFromHash = undefined; - }); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.filePathOrGitDiffFromAndToHashRequired - ); - } -); + await t.step( + "should throw an exception when either the filePath or (gitDiffFromHash and gitDiffToHash) are not specified", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest((config) => { + config.gitDiffToHash = undefined; + config.gitDiffFromHash = undefined; + }); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.filePathOrGitDiffFromAndToHashRequired, + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when either the filePath and gitDiffFromHash and gitDiffToHash are specified", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.filePath = [INVALID_FILE_PATH]) - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.filePathAndGitDiffFromAndToHashMutuallyExclusive - ); - } -); + await t.step( + "should throw an exception when either the filePath and gitDiffFromHash and gitDiffToHash are specified", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.filePath = [INVALID_FILE_PATH]), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.filePathAndGitDiffFromAndToHashMutuallyExclusive, + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the `gitDiffFromHash` is specified but `gitDiffToHash` is not", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.gitDiffToHash = undefined) - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.gitDiffFromAndToHashMustBeSpecifiedTogether - ); - } 
-); + await t.step( + "should throw an exception when the `gitDiffFromHash` is specified but `gitDiffToHash` is not", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.gitDiffToHash = undefined), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.gitDiffFromAndToHashMustBeSpecifiedTogether, + ); + }, + ); -Deno.test( - "ArgumentProcessor should throw an exception when the `gitDiffToHash` is specified but `gitDiffFromHash` is not", - () => { - assertThrows( - () => { - const { argumentProcessor } = setupTest( - (config) => (config.gitDiffFromHash = undefined) - ); - argumentProcessor.getConfig(); - }, - Error, - ERROR_MESSAGES.gitDiffFromAndToHashMustBeSpecifiedTogether - ); - } -); + await t.step( + "should throw an exception when the `gitDiffToHash` is specified but `gitDiffFromHash` is not", + () => { + assertThrows( + () => { + const { argumentProcessor } = setupTest( + (config) => (config.gitDiffFromHash = undefined), + ); + argumentProcessor.getConfig(); + }, + Error, + ERROR_MESSAGES.gitDiffFromAndToHashMustBeSpecifiedTogether, + ); + }, + ); +}); diff --git a/src/test/flow_comparator_test.ts b/src/test/flow_comparator_test.ts index 3bc6d02..bd2e356 100644 --- a/src/test/flow_comparator_test.ts +++ b/src/test/flow_comparator_test.ts @@ -42,44 +42,46 @@ const NODE_MODIFIED = { function createParsedFlow(nodes: flowTypes.FlowNode[]): ParsedFlow { return { nameToNode: new Map( - nodes.map((node) => [node.name, node]) + nodes.map((node) => [node.name, node]), ), }; } -Deno.test("compareFlows should set the diff status of a deleted node", () => { - const oldFlow: ParsedFlow = createParsedFlow([NODE]); - const newFlow: ParsedFlow = createParsedFlow([]); +Deno.test("FlowComparator", async (t) => { + await t.step("should set the diff status of a deleted node", () => { + const oldFlow: ParsedFlow = createParsedFlow([NODE]); + const newFlow: ParsedFlow = createParsedFlow([]); - compareFlows(oldFlow, newFlow); + compareFlows(oldFlow, newFlow); - const oldNode = oldFlow.nameToNode?.get(NODE.name); - assertEquals(oldNode != null, true); - assertEquals(oldNode!.diffStatus, flowTypes.DiffStatus.DELETED); -}); + const oldNode = oldFlow.nameToNode?.get(NODE.name); + assertEquals(oldNode != null, true); + assertEquals(oldNode!.diffStatus, flowTypes.DiffStatus.DELETED); + }); -Deno.test("compareFlows should set the diff status of a modified node", () => { - const oldFlow: ParsedFlow = createParsedFlow([NODE]); - const newFlow: ParsedFlow = createParsedFlow([NODE_MODIFIED]); + await t.step("should set the diff status of a modified node", () => { + const oldFlow: ParsedFlow = createParsedFlow([NODE]); + const newFlow: ParsedFlow = createParsedFlow([NODE_MODIFIED]); - compareFlows(oldFlow, newFlow); + compareFlows(oldFlow, newFlow); - const oldNode = oldFlow.nameToNode?.get(NODE.name); - const newNode = newFlow.nameToNode?.get(NODE_MODIFIED.name); + const oldNode = oldFlow.nameToNode?.get(NODE.name); + const newNode = newFlow.nameToNode?.get(NODE_MODIFIED.name); - assertEquals(oldNode != null, true); - assertEquals(newNode != null, true); - assertEquals(oldNode?.diffStatus, flowTypes.DiffStatus.MODIFIED); - assertEquals(newNode?.diffStatus, flowTypes.DiffStatus.MODIFIED); -}); + assertEquals(oldNode != null, true); + assertEquals(newNode != null, true); + assertEquals(oldNode?.diffStatus, flowTypes.DiffStatus.MODIFIED); + assertEquals(newNode?.diffStatus, flowTypes.DiffStatus.MODIFIED); + }); -Deno.test("compareFlows should set the diff 
status of an added node", () => { - const oldFlow: ParsedFlow = createParsedFlow([]); - const newFlow: ParsedFlow = createParsedFlow([NODE]); + await t.step("should set the diff status of an added node", () => { + const oldFlow: ParsedFlow = createParsedFlow([]); + const newFlow: ParsedFlow = createParsedFlow([NODE]); - compareFlows(oldFlow, newFlow); + compareFlows(oldFlow, newFlow); - const newNode = newFlow.nameToNode?.get(NODE.name); - assertEquals(newNode != null, true); - assertEquals(newNode?.diffStatus, flowTypes.DiffStatus.ADDED); + const newNode = newFlow.nameToNode?.get(NODE.name); + assertEquals(newNode != null, true); + assertEquals(newNode?.diffStatus, flowTypes.DiffStatus.ADDED); + }); }); diff --git a/src/test/flow_file_change_detector_test.ts b/src/test/flow_file_change_detector_test.ts index 3b873a4..9ed353d 100644 --- a/src/test/flow_file_change_detector_test.ts +++ b/src/test/flow_file_change_detector_test.ts @@ -17,7 +17,7 @@ import { assertEquals, assertThrows } from "@std/assert"; import { Buffer } from "node:buffer"; import { Configuration } from "../main/argument_processor.ts"; -import { getTestConfig } from "./argument_processor_test.ts"; +import { getTestConfig } from "./test_utils.ts"; import { ERROR_MESSAGES, FLOW_FILE_EXTENSION, @@ -54,7 +54,7 @@ Deno.test("FlowFileChangeDetector", async (t) => { const flowFiles = detector.getFlowFiles(); assertEquals(flowFiles, [FLOW_FILE_PATH]); - } + }, ); await t.step("should throw error if git is not installed", () => { @@ -67,7 +67,7 @@ Deno.test("FlowFileChangeDetector", async (t) => { assertThrows( () => detector.getFlowFiles(), Error, - ERROR_MESSAGES.gitIsNotInstalledError + ERROR_MESSAGES.gitIsNotInstalledError, ); }); @@ -81,7 +81,7 @@ Deno.test("FlowFileChangeDetector", async (t) => { assertThrows( () => detector.getFlowFiles(), Error, - ERROR_MESSAGES.notInGitRepoError + ERROR_MESSAGES.notInGitRepoError, ); }); @@ -95,7 +95,7 @@ Deno.test("FlowFileChangeDetector", async (t) => { assertThrows( () => detector.getFlowFiles(), Error, - ERROR_MESSAGES.diffError(new Error("Diff error")) + ERROR_MESSAGES.diffError(new Error("Diff error")), ); }); @@ -125,8 +125,8 @@ Deno.test("FlowFileChangeDetector", async (t) => { Error, ERROR_MESSAGES.unableToGetFileContent( FLOW_FILE_PATH, - new Error("Get file content error") - ) + new Error("Get file content error"), + ), ); }); }); diff --git a/src/test/flow_parser_test.ts b/src/test/flow_parser_test.ts index 6b82358..a40c344 100644 --- a/src/test/flow_parser_test.ts +++ b/src/test/flow_parser_test.ts @@ -33,11 +33,11 @@ const TEST_FILES = { noStartNode: path.join(GOLDENS_PATH, "no_start_node.flow-meta.xml"), missingTransitionNode: path.join( GOLDENS_PATH, - "missing_transition_node.flow-meta.xml" + "missing_transition_node.flow-meta.xml", ), circularTransition: path.join( GOLDENS_PATH, - "circular_transition.flow-meta.xml" + "circular_transition.flow-meta.xml", ), rollback: path.join(GOLDENS_PATH, "rollback.flow-meta.xml"), }; @@ -69,7 +69,7 @@ Deno.test("FlowParser", async (t) => { await t.step("should parse valid XML into a flow object", async () => { systemUnderTest = new FlowParser( - fs.readFileSync(TEST_FILES.sample, ENCODING) + fs.readFileSync(TEST_FILES.sample, ENCODING), ); parsedFlow = await systemUnderTest.generateFlowDefinition(); @@ -118,7 +118,7 @@ Deno.test("FlowParser", async (t) => { await t.step("should handle circular transitions", async () => { systemUnderTest = new FlowParser( - fs.readFileSync(TEST_FILES.circularTransition, ENCODING) + 
        fs.readFileSync(TEST_FILES.circularTransition, ENCODING),
       );
 
       parsedFlow = await systemUnderTest.generateFlowDefinition();
@@ -145,7 +145,7 @@ Deno.test("FlowParser", async (t) => {
     "should ensure multiple node definitions are represented as arrays",
     async () => {
       systemUnderTest = new FlowParser(
-        fs.readFileSync(TEST_FILES.multipleElements, ENCODING)
+        fs.readFileSync(TEST_FILES.multipleElements, ENCODING),
       );
 
       parsedFlow = await systemUnderTest.generateFlowDefinition();
@@ -155,80 +155,80 @@ Deno.test("FlowParser", async (t) => {
       // Compare actual parsedFlow nodes to expected based on the file
       assertEquals(
         parsedFlow.apexPluginCalls?.map((n) => n.name),
-        [NODE_NAMES.apexPluginCall, `${NODE_NAMES.apexPluginCall}2`]
+        [NODE_NAMES.apexPluginCall, `${NODE_NAMES.apexPluginCall}2`],
       );
       assertEquals(
         parsedFlow.assignments?.map((n) => n.name),
-        [NODE_NAMES.assignment, `${NODE_NAMES.assignment}2`]
+        [NODE_NAMES.assignment, `${NODE_NAMES.assignment}2`],
       );
       assertEquals(
         parsedFlow.collectionProcessors?.map((n) => n.name),
-        [NODE_NAMES.collectionProcessor, `${NODE_NAMES.collectionProcessor}2`]
+        [NODE_NAMES.collectionProcessor, `${NODE_NAMES.collectionProcessor}2`],
       );
       assertEquals(
         parsedFlow.decisions?.map((n) => n.name),
-        [NODE_NAMES.decision, `${NODE_NAMES.decision}2`]
+        [NODE_NAMES.decision, `${NODE_NAMES.decision}2`],
       );
       assertEquals(
         parsedFlow.loops?.map((n) => n.name),
-        [NODE_NAMES.loop, `${NODE_NAMES.loop}2`]
+        [NODE_NAMES.loop, `${NODE_NAMES.loop}2`],
       );
       assertEquals(
         parsedFlow.orchestratedStages?.map((n) => n.name),
-        [NODE_NAMES.orchestratedStage, `${NODE_NAMES.orchestratedStage}2`]
+        [NODE_NAMES.orchestratedStage, `${NODE_NAMES.orchestratedStage}2`],
       );
       assertEquals(
         parsedFlow.recordCreates?.map((n) => n.name),
-        [NODE_NAMES.recordCreate, `${NODE_NAMES.recordCreate}2`]
+        [NODE_NAMES.recordCreate, `${NODE_NAMES.recordCreate}2`],
       );
       assertEquals(
         parsedFlow.recordDeletes?.map((n) => n.name),
-        [NODE_NAMES.recordDelete, `${NODE_NAMES.recordDelete}2`]
+        [NODE_NAMES.recordDelete, `${NODE_NAMES.recordDelete}2`],
      );
       assertEquals(
         parsedFlow.recordLookups?.map((n) => n.name),
-        [NODE_NAMES.recordLookup, `${NODE_NAMES.recordLookup}2`]
+        [NODE_NAMES.recordLookup, `${NODE_NAMES.recordLookup}2`],
       );
       assertEquals(
         parsedFlow.recordRollbacks?.map((n) => n.name),
-        [NODE_NAMES.recordRollback, `${NODE_NAMES.recordRollback}2`]
+        [NODE_NAMES.recordRollback, `${NODE_NAMES.recordRollback}2`],
       );
       assertEquals(
         parsedFlow.recordUpdates?.map((n) => n.name),
-        [NODE_NAMES.recordUpdate, `${NODE_NAMES.recordUpdate}2`]
+        [NODE_NAMES.recordUpdate, `${NODE_NAMES.recordUpdate}2`],
       );
       assertEquals(
         parsedFlow.screens?.map((n) => n.name),
-        [NODE_NAMES.screen, `${NODE_NAMES.screen}2`]
+        [NODE_NAMES.screen, `${NODE_NAMES.screen}2`],
       );
       assertEquals(
         parsedFlow.steps?.map((n) => n.name),
-        [NODE_NAMES.step, `${NODE_NAMES.step}2`]
+        [NODE_NAMES.step, `${NODE_NAMES.step}2`],
       );
       assertEquals(
         parsedFlow.subflows?.map((n) => n.name),
-        [NODE_NAMES.subflow, `${NODE_NAMES.subflow}2`]
+        [NODE_NAMES.subflow, `${NODE_NAMES.subflow}2`],
       );
       assertEquals(
         parsedFlow.transforms?.map((n) => n.name),
-        [NODE_NAMES.transform, `${NODE_NAMES.transform}2`]
+        [NODE_NAMES.transform, `${NODE_NAMES.transform}2`],
       );
       assertEquals(
         parsedFlow.waits?.map((n) => n.name),
-        [NODE_NAMES.wait, `${NODE_NAMES.wait}2`]
+        [NODE_NAMES.wait, `${NODE_NAMES.wait}2`],
       );
       assertEquals(
         parsedFlow.actionCalls?.map((n) => n.name),
-        [NODE_NAMES.actionCall, `${NODE_NAMES.actionCall}2`]
+        [NODE_NAMES.actionCall, `${NODE_NAMES.actionCall}2`],
       );
-    }
+    },
   );
 
   await t.step(
     "should ensure single node definitions are represented as arrays",
     async () => {
       systemUnderTest = new FlowParser(
-        fs.readFileSync(TEST_FILES.singleElements, ENCODING)
+        fs.readFileSync(TEST_FILES.singleElements, ENCODING),
       );
 
       parsedFlow = await systemUnderTest.generateFlowDefinition();
@@ -237,78 +237,78 @@ Deno.test("FlowParser", async (t) => {
       // Compare actual parsedFlow nodes to expected based on the file
       assertEquals(
         parsedFlow.apexPluginCalls?.map((n) => n.name),
-        [NODE_NAMES.apexPluginCall]
+        [NODE_NAMES.apexPluginCall],
       );
       assertEquals(
         parsedFlow.assignments?.map((n) => n.name),
-        [NODE_NAMES.assignment]
+        [NODE_NAMES.assignment],
       );
       assertEquals(
         parsedFlow.collectionProcessors?.map((n) => n.name),
-        [NODE_NAMES.collectionProcessor]
+        [NODE_NAMES.collectionProcessor],
       );
       assertEquals(
         parsedFlow.decisions?.map((n) => n.name),
-        [NODE_NAMES.decision]
+        [NODE_NAMES.decision],
       );
       assertEquals(
         parsedFlow.loops?.map((n) => n.name),
-        [NODE_NAMES.loop]
+        [NODE_NAMES.loop],
       );
       assertEquals(
         parsedFlow.orchestratedStages?.map((n) => n.name),
-        [NODE_NAMES.orchestratedStage]
+        [NODE_NAMES.orchestratedStage],
       );
       assertEquals(
         parsedFlow.recordCreates?.map((n) => n.name),
-        [NODE_NAMES.recordCreate]
+        [NODE_NAMES.recordCreate],
       );
       assertEquals(
         parsedFlow.recordDeletes?.map((n) => n.name),
-        [NODE_NAMES.recordDelete]
+        [NODE_NAMES.recordDelete],
       );
       assertEquals(
         parsedFlow.recordLookups?.map((n) => n.name),
-        [NODE_NAMES.recordLookup]
+        [NODE_NAMES.recordLookup],
       );
       assertEquals(
         parsedFlow.recordRollbacks?.map((n) => n.name),
-        [NODE_NAMES.recordRollback]
+        [NODE_NAMES.recordRollback],
       );
       assertEquals(
         parsedFlow.recordUpdates?.map((n) => n.name),
-        [NODE_NAMES.recordUpdate]
+        [NODE_NAMES.recordUpdate],
       );
       assertEquals(
         parsedFlow.screens?.map((n) => n.name),
-        [NODE_NAMES.screen]
+        [NODE_NAMES.screen],
       );
       assertEquals(
         parsedFlow.steps?.map((n) => n.name),
-        [NODE_NAMES.step]
+        [NODE_NAMES.step],
       );
       assertEquals(
         parsedFlow.subflows?.map((n) => n.name),
-        [NODE_NAMES.subflow]
+        [NODE_NAMES.subflow],
       );
       assertEquals(
         parsedFlow.transforms?.map((n) => n.name),
-        [NODE_NAMES.transform]
+        [NODE_NAMES.transform],
       );
       assertEquals(
         parsedFlow.waits?.map((n) => n.name),
-        [NODE_NAMES.wait]
+        [NODE_NAMES.wait],
       );
       assertEquals(
         parsedFlow.actionCalls?.map((n) => n.name),
-        [NODE_NAMES.actionCall]
+        [NODE_NAMES.actionCall],
       );
-    }
+    },
   );
 
   await t.step("should properly identify rollbacks", async () => {
     systemUnderTest = new FlowParser(
-      fs.readFileSync(TEST_FILES.rollback, ENCODING)
+      fs.readFileSync(TEST_FILES.rollback, ENCODING),
     );
 
     parsedFlow = await systemUnderTest.generateFlowDefinition();
@@ -329,7 +329,7 @@ Deno.test("FlowParser", async (t) => {
     assertEquals(
       parsedFlow.screens?.map((n) => n.name),
-      [NODE_NAMES.screen]
+      [NODE_NAMES.screen],
     );
 
     assertEquals(parsedFlow.transitions, [
@@ -371,29 +371,29 @@ Deno.test("FlowParser", async (t) => {
     "should throw an error when the XML is missing a start node",
     async () => {
       systemUnderTest = new FlowParser(
-        fs.readFileSync(TEST_FILES.noStartNode, ENCODING)
+        fs.readFileSync(TEST_FILES.noStartNode, ENCODING),
       );
 
       await assertRejects(
         async () => await systemUnderTest.generateFlowDefinition(),
         Error,
-        ERROR_MESSAGES.flowStartNotDefined
+        ERROR_MESSAGES.flowStartNotDefined,
       );
-    }
+    },
   );
 
   await t.step(
     "should throw an error when the XML contains an invalid transition",
     async () => {
       systemUnderTest = new FlowParser(
-        fs.readFileSync(TEST_FILES.missingTransitionNode, ENCODING)
+        fs.readFileSync(TEST_FILES.missingTransitionNode, ENCODING),
       );
 
       await assertRejects(
         async () => await systemUnderTest.generateFlowDefinition(),
         Error,
-        ERROR_MESSAGES.couldNotFindConnectedNode(NON_EXISTING_ELEMENT)
+        ERROR_MESSAGES.couldNotFindConnectedNode(NON_EXISTING_ELEMENT),
       );
-    }
+    },
   );
 });
diff --git a/src/test/flow_to_uml_transformer_test.ts b/src/test/flow_to_uml_transformer_test.ts
index f44c018..35abecf 100644
--- a/src/test/flow_to_uml_transformer_test.ts
+++ b/src/test/flow_to_uml_transformer_test.ts
@@ -15,14 +15,14 @@
  */
 
 import {
+  assert,
   assertEquals,
   assertExists,
   assertStringIncludes,
-  assert,
 } from "@std/assert";
 import * as fs from "node:fs";
 import { Configuration, DiagramTool } from "../main/argument_processor.ts";
-import { getTestConfig } from "./argument_processor_test.ts";
+import { getTestConfig } from "./test_utils.ts";
 import { FlowFileChangeDetector } from "../main/flow_file_change_detector.ts";
 import {
   ERROR_MESSAGES,
@@ -58,14 +58,14 @@ Deno.test("FlowToUmlTransformer", async (t) => {
     transformer = new FlowToUmlTransformer(
       [SAMPLE_FLOW_FILE_PATH],
       GENERATOR_CONTEXT,
-      CHANGE_DETECTOR
+      CHANGE_DETECTOR,
     );
 
     result = await transformer.transformToUmlDiagrams();
 
     assert(
       result.has(SAMPLE_FLOW_FILE_PATH),
-      "result should have the sample file path as a key"
+      "result should have the sample file path as a key",
     );
 
     const flowDifference = result.get(SAMPLE_FLOW_FILE_PATH);
@@ -75,7 +75,7 @@ Deno.test("FlowToUmlTransformer", async (t) => {
     assertStringIncludes(
       newUml,
       PLANT_UML_SIGNATURE,
-      "newUml should contain PLANT_UML_SIGNATURE"
+      "newUml should contain PLANT_UML_SIGNATURE",
     );
 
     Configuration.getInstance = originalGetInstance; // Restore the original getInstance function
@@ -100,7 +100,7 @@ Deno.test("FlowToUmlTransformer", async (t) => {
      transformer = new FlowToUmlTransformer(
         [fakeFilePath],
         GENERATOR_CONTEXT,
-        CHANGE_DETECTOR
+        CHANGE_DETECTOR,
       );
 
       result = await transformer.transformToUmlDiagrams();
@@ -110,16 +110,16 @@ Deno.test("FlowToUmlTransformer", async (t) => {
 
       const expectedErrorMessage = ERROR_MESSAGES.unableToProcessFile(
         fakeFilePath,
-        new Error(XML_READER_ERROR_MESSAGES.invalidFilePath(fakeFilePath))
+        new Error(XML_READER_ERROR_MESSAGES.invalidFilePath(fakeFilePath)),
      );
 
       assertEquals(
         consoleErrorCall,
         expectedErrorMessage,
-        "console.error call should have the expected message"
+        "console.error call should have the expected message",
       );
 
       Configuration.getInstance = originalGetInstance; // Restore the original getInstance function
       console.error = originalConsoleError; // Restore the original console.error
-    }
+    },
   );
 });
diff --git a/src/test/github_client_test.ts b/src/test/github_client_test.ts
index 0a91b42..e566d3c 100644
--- a/src/test/github_client_test.ts
+++ b/src/test/github_client_test.ts
@@ -26,7 +26,8 @@ class MockOctokit {
     return {
       data: [
         {
-          url: "https://api.github.com/repos/mock-owner/mock-repo/pulls/comments/1",
+          url:
+            "https://api.github.com/repos/mock-owner/mock-repo/pulls/comments/1",
           pull_request_review_id: 42,
           id: 1,
           node_id: "mock-node-id",
@@ -57,13 +58,16 @@ class MockOctokit {
           author_association: "CONTRIBUTOR",
           _links: {
             self: {
-              href: "https://api.github.com/repos/mock-owner/mock-repo/pulls/comments/1",
+              href:
+                "https://api.github.com/repos/mock-owner/mock-repo/pulls/comments/1",
             },
             html: {
-              href: "https://github.com/mock-owner/mock-repo/pull/42#discussion-1",
+              href:
+                "https://github.com/mock-owner/mock-repo/pull/42#discussion-1",
             },
             pull_request: {
-              href: "https://api.github.com/repos/mock-owner/mock-repo/pulls/42",
+              href:
+                "https://api.github.com/repos/mock-owner/mock-repo/pulls/42",
             },
           },
         },
@@ -163,7 +167,7 @@ Deno.test("GithubClient", async (t) => {
       const mockOctokit = new MockOctokit();
       const githubClient = new GithubClient(
         "fake-token",
-        invalidContext
+        invalidContext,
       ) as any;
       githubClient.octokit = mockOctokit;
@@ -177,7 +181,7 @@ Deno.test("GithubClient", async (t) => {
       await assertRejects(
         () => githubClient.writeComment(comment),
         Error,
-        ERROR_MESSAGES.NOT_PR_CONTEXT
+        ERROR_MESSAGES.NOT_PR_CONTEXT,
       );
     });
   });
@@ -216,21 +220,21 @@ Deno.test("GithubClient", async (t) => {
         assertEquals(comments[0].body, "Test review comment");
         assertEquals(comments[0].path, "test/file.ts");
         assertEquals(comments[0].pull_request_review_id, 42);
-      }
+      },
     );
 
     await t.step("should throw error when not in PR context", async () => {
       const mockOctokit = new MockOctokit();
       const githubClient = new GithubClient(
         "fake-token",
-        invalidContext
+        invalidContext,
       ) as any;
       githubClient.octokit = mockOctokit;
 
       await assertRejects(
         () => githubClient.getAllCommentsForPullRequest(),
         Error,
-        ERROR_MESSAGES.NOT_PR_CONTEXT
+        ERROR_MESSAGES.NOT_PR_CONTEXT,
       );
     });
@@ -247,7 +251,7 @@ Deno.test("GithubClient", async (t) => {
       await assertRejects(
         () => githubClient.getAllCommentsForPullRequest(),
         Error,
-        ERROR_MESSAGES.FETCH_COMMENTS_FAILED("API error")
+        ERROR_MESSAGES.FETCH_COMMENTS_FAILED("API error"),
       );
     });
   });
@@ -274,14 +278,14 @@ Deno.test("GithubClient", async (t) => {
       const mockOctokit = new MockOctokit();
       const githubClient = new GithubClient(
         "fake-token",
-        invalidContext
+        invalidContext,
       ) as any;
       githubClient.octokit = mockOctokit;
 
      await assertRejects(
         () => githubClient.deleteReviewComment(123),
         Error,
-        ERROR_MESSAGES.NOT_PR_CONTEXT
+        ERROR_MESSAGES.NOT_PR_CONTEXT,
       );
     });
@@ -298,7 +302,7 @@ Deno.test("GithubClient", async (t) => {
       await assertRejects(
         () => githubClient.deleteReviewComment(123),
         Error,
-        ERROR_MESSAGES.DELETE_COMMENT_FAILED(123, "API error")
+        ERROR_MESSAGES.DELETE_COMMENT_FAILED(123, "API error"),
       );
     });
   });
diff --git a/src/test/graphviz_generator_test.ts b/src/test/graphviz_generator_test.ts
index c3fc7c0..18edb3d 100644
--- a/src/test/graphviz_generator_test.ts
+++ b/src/test/graphviz_generator_test.ts
@@ -52,25 +52,19 @@ const NODE_NAMES = {
   actionCall: "myActionCall",
 };
 
-const DIFF_INDICATOR = {
-  ADDED: '<FONT COLOR="green">+',
-  DELETED: '<FONT COLOR="red">-',
-  MODIFIED: '<FONT COLOR="#DD7A00">Δ',
-};
-
 function generateMockFlow(): ParsedFlow {
   return {
     start: {
       name: NODE_NAMES.start,
     } as flowTypes.FlowStart,
     apexPluginCalls: getFlowNodes(
-      NODE_NAMES.apexPluginCall
+      NODE_NAMES.apexPluginCall,
     ) as flowTypes.FlowApexPluginCall[],
     assignments: getFlowNodes(
-      NODE_NAMES.assignment
+      NODE_NAMES.assignment,
     ) as flowTypes.FlowAssignment[],
     collectionProcessors: getFlowNodes(
-      NODE_NAMES.collectionProcessor
+      NODE_NAMES.collectionProcessor,
     ) as flowTypes.FlowCollectionProcessor[],
     decisions: [
       generateDecision(NODE_NAMES.decision),
@@ -80,19 +74,19 @@ function generateMockFlow(): ParsedFlow {
       generateStage(NODE_NAMES.orchestratedStage, NODE_NAMES.stageSteps),
     ],
     recordCreates: getFlowNodes(
-      NODE_NAMES.recordCreate
+      NODE_NAMES.recordCreate,
     ) as flowTypes.FlowRecordCreate[],
     recordDeletes: getFlowNodes(
-      NODE_NAMES.recordDelete
+      NODE_NAMES.recordDelete,
     ) as flowTypes.FlowRecordDelete[],
     recordLookups: getFlowNodes(
-      NODE_NAMES.recordLookup
+      NODE_NAMES.recordLookup,
     ) as flowTypes.FlowRecordLookup[],
     recordRollbacks: getFlowNodes(
-      NODE_NAMES.recordRollback
+      NODE_NAMES.recordRollback,
     ) as flowTypes.FlowRecordRollback[],
     recordUpdates: getFlowNodes(
-      NODE_NAMES.recordUpdate
+      NODE_NAMES.recordUpdate,
     ) as flowTypes.FlowRecordUpdate[],
     screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
     steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
@@ -100,7 +94,7 @@ function generateMockFlow(): ParsedFlow {
     transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
     waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
     actionCalls: getFlowNodes(
-      NODE_NAMES.actionCall
+      NODE_NAMES.actionCall,
     ) as flowTypes.FlowActionCall[],
     transitions: [
       {
@@ -128,7 +122,7 @@ function getFlowNodes(name: string): flowTypes.FlowNode[] {
 
 function generateStage(
   name: string,
-  stepNames: string[]
+  stepNames: string[],
 ): flowTypes.FlowOrchestratedStage {
   return {
     name: `${name}`,
@@ -184,7 +178,7 @@ function generateTable(
   icon: Icon,
   skinColor: SkinColor,
   fontColor: string,
-  innerNodeBody?: string
+  innerNodeBody?: string,
 ) {
   const formattedInnerNodeBody = innerNodeBody
     ? `${EOL}${innerNodeBody}${EOL}`
     :
@@ -211,7 +205,7 @@ function generateTable(
 function generateInnerNodeCell(
   color: FontColor,
   expectedLabel: string,
-  content: string[]
+  content: string[],
 ) {
   return `
@@ -225,7 +219,7 @@ function generateInnerNodeCells(cells: string[]) {
   return cells.join(EOL);
 }
 
-Deno.test("GraphViz", async (t) => {
+Deno.test("GraphVizGenerator", async (t) => {
   let systemUnderTest: GraphVizGenerator;
   let mockedFlow: ParsedFlow;
   let result: string;
@@ -262,8 +256,8 @@ Deno.test("GraphViz", async (t) => {
         "Apex Plugin Call",
         Icon.CODE,
         SkinColor.NONE,
-        FontColor.BLACK
-      )
+        FontColor.BLACK,
+      ),
     );
   });
@@ -283,8 +277,8 @@ Deno.test("GraphViz", async (t) => {
         "Assignment",
         Icon.ASSIGNMENT,
         SkinColor.ORANGE,
-        FontColor.WHITE
-      )
+        FontColor.WHITE,
+      ),
     );
   });
@@ -315,8 +309,8 @@ Deno.test("GraphViz", async (t) => {
         FontColor.WHITE,
         generateInnerNodeCell(FontColor.WHITE, "Rule myDecisionRule", [
           "1. foo EqualTo true",
-        ])
-      )
+        ]),
+      ),
     );
   });
@@ -354,8 +348,8 @@ Deno.test("GraphViz", async (t) => {
         generateInnerNodeCells([
           generateInnerNodeCell(FontColor.WHITE, "Stage Step 1. step1", []),
           generateInnerNodeCell(FontColor.WHITE, "Stage Step 2. step2", []),
-        ])
-      )
+        ]),
+      ),
     );
   });
@@ -402,7 +396,7 @@ Deno.test("GraphViz", async (t) => {
     result = systemUnderTest.getTransition(mockedFlow.transitions![0]);
     assertEquals(
       result,
-      'FLOW_START -> myApexPluginCall [label="" color="black" style=""]'
+      'FLOW_START -> myApexPluginCall [label="" color="black" style=""]',
     );
   });
 });
diff --git a/src/test/main_test.ts b/src/test/main_test.ts
index 0d69a56..9ad67a7 100644
--- a/src/test/main_test.ts
+++ b/src/test/main_test.ts
@@ -56,6 +56,6 @@ Deno.test(
     assert(runner.filePathToFlowDifference.has(SAMPLE_FLOW_FILE_PATH));
     assertExists(runner.filePathToFlowDifference.get(SAMPLE_FLOW_FILE_PATH));
 
-    Deno.remove(path.join(TEST_UNDECLARED_OUTPUTS_DIR, "test.json"));
-  }
+    await Deno.remove(path.join(TEST_UNDECLARED_OUTPUTS_DIR, "test.json"));
+  },
 );
diff --git a/src/test/mermaid_generator_test.ts b/src/test/mermaid_generator_test.ts
index ce48a41..8953ec9 100644
--- a/src/test/mermaid_generator_test.ts
+++ b/src/test/mermaid_generator_test.ts
@@ -53,13 +53,13 @@ function generateMockFlow(): ParsedFlow {
       name: NODE_NAMES.start,
     } as flowTypes.FlowStart,
     apexPluginCalls: getFlowNodes(
-      NODE_NAMES.apexPluginCall
+      NODE_NAMES.apexPluginCall,
     ) as flowTypes.FlowApexPluginCall[],
     assignments: getFlowNodes(
-      NODE_NAMES.assignment
+      NODE_NAMES.assignment,
     ) as flowTypes.FlowAssignment[],
     collectionProcessors: getFlowNodes(
-      NODE_NAMES.collectionProcessor
+      NODE_NAMES.collectionProcessor,
     ) as flowTypes.FlowCollectionProcessor[],
     decisions: [
       generateDecision(NODE_NAMES.decision),
@@ -69,19 +69,19 @@ function generateMockFlow(): ParsedFlow {
       generateStage(NODE_NAMES.orchestratedStage, NODE_NAMES.stageSteps),
     ],
     recordCreates: getFlowNodes(
-      NODE_NAMES.recordCreate
+      NODE_NAMES.recordCreate,
     ) as flowTypes.FlowRecordCreate[],
     recordDeletes: getFlowNodes(
-      NODE_NAMES.recordDelete
+      NODE_NAMES.recordDelete,
     ) as flowTypes.FlowRecordDelete[],
     recordLookups: getFlowNodes(
-      NODE_NAMES.recordLookup
+      NODE_NAMES.recordLookup,
     ) as flowTypes.FlowRecordLookup[],
     recordRollbacks: getFlowNodes(
-      NODE_NAMES.recordRollback
+      NODE_NAMES.recordRollback,
     ) as flowTypes.FlowRecordRollback[],
     recordUpdates: getFlowNodes(
-      NODE_NAMES.recordUpdate
+      NODE_NAMES.recordUpdate,
     ) as flowTypes.FlowRecordUpdate[],
     screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
     steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
@@ -89,7 +89,7 @@ function generateMockFlow(): ParsedFlow {
     transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
     waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
     actionCalls: getFlowNodes(
-      NODE_NAMES.actionCall
+      NODE_NAMES.actionCall,
     ) as flowTypes.FlowActionCall[],
     transitions: [
       {
@@ -119,7 +119,7 @@ function getFlowNodes(name: string): flowTypes.FlowNode[] {
 
 function generateStage(
   name: string,
-  stepNames: string[]
+  stepNames: string[],
 ): flowTypes.FlowOrchestratedStage {
   return {
     name: `${name}`,
@@ -279,7 +279,7 @@ Deno.test("MermaidGenerator", async (t) => {
     result = systemUnderTest.toUmlString(node);
     assertStringIncludes(
       result,
-      "+"
+      "+",
     );
     assertStringIncludes(result, "class myNode pink");
   });
@@ -341,11 +341,11 @@ Deno.test("MermaidGenerator", async (t) => {
     assertStringIncludes(result, "FLOW_START --> myApexPluginCall");
     assertStringIncludes(
       result,
-      "myApexPluginCall --> myAssignment : Normal Transition "
+      "myApexPluginCall --> myAssignment : Normal Transition ",
     );
     assertStringIncludes(
       result,
-      "myAssignment --> myDecision : ❌ Error Path ❌"
+      "myAssignment --> myDecision : ❌ Error Path ❌",
     );
 
     assertStringIncludes(result, "class");
diff --git a/src/test/plantuml_generator_test.ts b/src/test/plantuml_generator_test.ts
index 559f020..068399c 100644
--- a/src/test/plantuml_generator_test.ts
+++ b/src/test/plantuml_generator_test.ts
@@ -52,13 +52,13 @@ function generateMockFlow(): ParsedFlow {
       name: NODE_NAMES.start,
     } as flowTypes.FlowStart,
     apexPluginCalls: getFlowNodes(
-      NODE_NAMES.apexPluginCall
+      NODE_NAMES.apexPluginCall,
     ) as flowTypes.FlowApexPluginCall[],
     assignments: getFlowNodes(
-      NODE_NAMES.assignment
+      NODE_NAMES.assignment,
     ) as flowTypes.FlowAssignment[],
     collectionProcessors: getFlowNodes(
-      NODE_NAMES.collectionProcessor
+      NODE_NAMES.collectionProcessor,
     ) as flowTypes.FlowCollectionProcessor[],
     decisions: getFlowNodes(NODE_NAMES.decision) as flowTypes.FlowDecision[],
     loops: getFlowNodes(NODE_NAMES.loop) as flowTypes.FlowLoop[],
@@ -66,19 +66,19 @@ function generateMockFlow(): ParsedFlow {
       generateStage(NODE_NAMES.orchestratedStage, NODE_NAMES.stageSteps),
     ],
     recordCreates: getFlowNodes(
-      NODE_NAMES.recordCreate
+      NODE_NAMES.recordCreate,
     ) as flowTypes.FlowRecordCreate[],
     recordDeletes: getFlowNodes(
-      NODE_NAMES.recordDelete
+      NODE_NAMES.recordDelete,
     ) as flowTypes.FlowRecordDelete[],
     recordLookups: getFlowNodes(
-      NODE_NAMES.recordLookup
+      NODE_NAMES.recordLookup,
     ) as flowTypes.FlowRecordLookup[],
     recordRollbacks: getFlowNodes(
-      NODE_NAMES.recordRollback
+      NODE_NAMES.recordRollback,
     ) as flowTypes.FlowRecordRollback[],
     recordUpdates: getFlowNodes(
-      NODE_NAMES.recordUpdate
+      NODE_NAMES.recordUpdate,
     ) as flowTypes.FlowRecordUpdate[],
     screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
     steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
@@ -86,7 +86,7 @@ function generateMockFlow(): ParsedFlow {
     transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
     waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
     actionCalls: getFlowNodes(
-      NODE_NAMES.actionCall
+      NODE_NAMES.actionCall,
     ) as flowTypes.FlowActionCall[],
     transitions: [
       {
@@ -114,7 +114,7 @@ function getFlowNodes(name: string): flowTypes.FlowNode[] {
 
 function generateStage(
   name: string,
-  stepNames: string[]
+  stepNames: string[],
 ): flowTypes.FlowOrchestratedStage {
   return {
     name: `${name}`,
@@ -136,7 +136,7 @@ function generateStage(
   } as flowTypes.FlowOrchestratedStage;
 }
 
-Deno.test("PlantUml", async (t) => {
+Deno.test("PlantUmlGenerator", async (t) => {
   let systemUnderTest: PlantUmlGenerator;
   let mockedFlow: ParsedFlow;
   let result: string;
@@ -171,7 +171,7 @@ Deno.test("PlantUml", async (t) => {
     result = systemUnderTest.toUmlString(node);
     assertEquals(
       result,
-      'state "**Apex Plugin Call** <&code> \\n myApexPluginCall" as myApexPluginCall'
+      'state "**Apex Plugin Call** <&code> \\n myApexPluginCall" as myApexPluginCall',
     );
   });
@@ -186,7 +186,7 @@ Deno.test("PlantUml", async (t) => {
     result = systemUnderTest.toUmlString(node);
     assertEquals(
       result,
-      'state "**Assignment** <&menu> \\n myAssignment" as myAssignment <>'
+      'state "**Assignment** <&menu> \\n myAssignment" as myAssignment <>',
     );
   });
@@ -201,7 +201,7 @@ Deno.test("PlantUml", async (t) => {
     result = systemUnderTest.toUmlString(node);
     assertEquals(
       result,
-      'state "**Decision** <&fork> \\n myDecision" as myDecision <>'
+      'state "**Decision** <&fork> \\n myDecision" as myDecision <>',
     );
   });
@@ -235,7 +235,7 @@ Deno.test("PlantUml", async (t) => {
       "myOrchestratedStage: __step1__\n" +
         "myOrchestratedStage: ---\n" +
         "myOrchestratedStage: **Stage Step**\n" +
-        "myOrchestratedStage: __step2__"
+        "myOrchestratedStage: __step2__",
     );
   });
@@ -251,7 +251,7 @@ Deno.test("PlantUml", async (t) => {
     result = systemUnderTest.toUmlString(node);
     assertEquals(
       result,
-      'state "**<&plus{scale=2}>** **Record Create** <&medical-cross> \\n myNode" as myNode <> <>'
+      'state "**<&plus{scale=2}>** **Record Create** <&medical-cross> \\n myNode" as myNode <> <>',
     );
   });
diff --git a/src/test/test_utils.ts b/src/test/test_utils.ts
new file mode 100644
index 0000000..46df948
--- /dev/null
+++ b/src/test/test_utils.ts
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {
+  DiagramTool,
+  Mode,
+  RuntimeConfig,
+} from "../main/argument_processor.ts";
+
+/**
+ * The test configuration object that is used by the tests.
+ */
+export function getTestConfig(): RuntimeConfig {
+  return {
+    diagramTool: DiagramTool.PLANTUML,
+    filePath: [],
+    gitDiffFromHash: "HEAD~",
+    gitDiffToHash: "HEAD",
+    outputDirectory: "/",
+    outputFileName: "test",
+    mode: Mode.JSON,
+  };
+}
diff --git a/src/test/uml_generator_context_test.ts b/src/test/uml_generator_context_test.ts
index 04c6060..b225ea5 100644
--- a/src/test/uml_generator_context_test.ts
+++ b/src/test/uml_generator_context_test.ts
@@ -35,6 +35,6 @@ Deno.test("UmlGeneratorContext", async (t) => {
       let generatorContext = new UmlGeneratorContext("fooBar" as DiagramTool);
       const diagram = generatorContext.generateDiagram(parsedFlow);
       assertStringIncludes(diagram, PLANT_UML_SIGNATURE);
-    }
+    },
   );
 });
diff --git a/src/test/uml_generator_test.ts b/src/test/uml_generator_test.ts
index 38d5681..f19c712 100644
--- a/src/test/uml_generator_test.ts
+++ b/src/test/uml_generator_test.ts
@@ -17,7 +17,7 @@
 import { assertEquals } from "@std/assert";
 import { ParsedFlow, Transition } from "../main/flow_parser.ts";
 import * as flowTypes from "../main/flow_types.ts";
-import { UmlGenerator, DiagramNode } from "../main/uml_generator.ts";
+import { DiagramNode, UmlGenerator } from "../main/uml_generator.ts";
 
 const EOL = Deno.build.os === "windows" ? "\r\n" : "\n";
 const TRANSITION_ARROW = "-->";
@@ -53,13 +53,15 @@ const UML_REPRESENTATIONS = {
   orchestratedStage: (name: string) => `state Orchestrated Stage ${name}${EOL}`,
   recordCreate: (name: string) => `state Record Create ${name}`,
   recordDelete: (name: string) => `state Record Delete ${name}`,
-  recordLookup: (name: string) => `state Record Lookup ${name}
+  recordLookup: (name: string) =>
+    `state Record Lookup ${name}
 sObject: Account
 Fields Queried: all
 Filter Logic: None
 Limit: All Records`,
   recordRollback: (name: string) => `state Record Rollback ${name}`,
-  recordUpdate: (name: string) => `state Record Update ${name}
+  recordUpdate: (name: string) =>
+    `state Record Update ${name}
 Direct Update:
 sObject: Account
 `,
   screen: (name: string) => `state Screen ${name}`,
@@ -78,33 +80,33 @@ function generateMockFlow() {
       name: NODE_NAMES.start,
     } as flowTypes.FlowStart,
     apexPluginCalls: getFlowNodes(
-      NODE_NAMES.apexPluginCall
+      NODE_NAMES.apexPluginCall,
     ) as flowTypes.FlowApexPluginCall[],
     assignments: getFlowNodes(
-      NODE_NAMES.assignment
+      NODE_NAMES.assignment,
     ) as flowTypes.FlowAssignment[],
     collectionProcessors: getFlowNodes(
-      NODE_NAMES.collectionProcessor
+      NODE_NAMES.collectionProcessor,
     ) as flowTypes.FlowCollectionProcessor[],
     decisions: getFlowNodes(NODE_NAMES.decision) as flowTypes.FlowDecision[],
     loops: getFlowNodes(NODE_NAMES.loop) as flowTypes.FlowLoop[],
     orchestratedStages: getFlowNodes(
-      NODE_NAMES.orchestratedStage
+      NODE_NAMES.orchestratedStage,
     ) as flowTypes.FlowOrchestratedStage[],
     recordCreates: getFlowNodes(
-      NODE_NAMES.recordCreate
+      NODE_NAMES.recordCreate,
     ) as flowTypes.FlowRecordCreate[],
     recordDeletes: getFlowNodes(
-      NODE_NAMES.recordDelete
+      NODE_NAMES.recordDelete,
     ) as flowTypes.FlowRecordDelete[],
     recordLookups: getFlowNodes(
-      NODE_NAMES.recordLookup
+      NODE_NAMES.recordLookup,
     ) as flowTypes.FlowRecordLookup[],
     recordRollbacks: getFlowNodes(
-      NODE_NAMES.recordRollback
+      NODE_NAMES.recordRollback,
     ) as flowTypes.FlowRecordRollback[],
     recordUpdates: getFlowNodes(
-      NODE_NAMES.recordUpdate
+      NODE_NAMES.recordUpdate,
     ) as flowTypes.FlowRecordUpdate[],
     screens: getFlowNodes(NODE_NAMES.screen) as flowTypes.FlowScreen[],
     steps: getFlowNodes(NODE_NAMES.step) as flowTypes.FlowStep[],
@@ -112,7 +114,7 @@ function generateMockFlow() {
     transforms: getFlowNodes(NODE_NAMES.transform) as flowTypes.FlowTransform[],
     waits: getFlowNodes(NODE_NAMES.wait) as flowTypes.FlowWait[],
     actionCalls: getFlowNodes(
-      NODE_NAMES.actionCall
+      NODE_NAMES.actionCall,
     ) as flowTypes.FlowActionCall[],
     transitions: [
       {
@@ -257,15 +259,15 @@ Deno.test("UmlGenerator", async (t) => {
       UML_REPRESENTATIONS.actionCall(NODE_NAMES.actionCall),
       UML_REPRESENTATIONS.transition(
         NODE_NAMES.start,
-        NODE_NAMES.apexPluginCall
+        NODE_NAMES.apexPluginCall,
       ),
       UML_REPRESENTATIONS.transition(
         NODE_NAMES.apexPluginCall,
-        NODE_NAMES.assignment
+        NODE_NAMES.assignment,
       ),
       UML_REPRESENTATIONS.transition(
         NODE_NAMES.assignment,
-        NODE_NAMES.collectionProcessor
+        NODE_NAMES.collectionProcessor,
      ),
     ].join(EOL);
@@ -279,7 +281,7 @@ Deno.test("UmlGenerator", async (t) => {
 
     assertEquals(
       uml.includes(UML_REPRESENTATIONS.screen(NODE_NAMES.screen)),
-      false
+      false,
     );
   });
@@ -290,7 +292,7 @@ Deno.test("UmlGenerator", async (t) => {
 
     assertEquals(
       uml.includes(UML_REPRESENTATIONS.screen(NODE_NAMES.screen)),
-      false
+      false,
     );
   });
@@ -353,10 +355,10 @@ Deno.test("UmlGenerator", async (t) => {
       assertEquals(
         uml.includes(content),
         true,
-        `Expected UML: ${uml} to contain: ${content}`
+        `Expected UML: ${uml} to contain: ${content}`,
       );
     });
-    }
+    },
   );
 
   await t.step(
@@ -389,10 +391,10 @@ Deno.test("UmlGenerator", async (t) => {
       assertEquals(
         uml.includes(content),
         true,
-        `Expected UML: ${uml} to contain: ${content}`
+        `Expected UML: ${uml} to contain: ${content}`,
       );
     });
-    }
+    },
   );
 
   await t.step(
@@ -451,10 +453,10 @@ Deno.test("UmlGenerator", async (t) => {
       assertEquals(
         uml.includes(content),
         true,
-        `Expected UML: ${uml} to contain: ${content}`
+        `Expected UML: ${uml} to contain: ${content}`,
       );
     });
-    }
+    },
   );
 
   await t.step(
@@ -482,10 +484,10 @@ Deno.test("UmlGenerator", async (t) => {
       assertEquals(
         uml.includes(content),
         true,
-        `Expected UML: ${uml} to contain: ${content}`
+        `Expected UML: ${uml} to contain: ${content}`,
       );
     });
-    }
+    },
   );
 
   await t.step(
@@ -534,9 +536,9 @@ Deno.test("UmlGenerator", async (t) => {
       assertEquals(
         uml.includes(content),
         true,
-        `Expected UML: ${uml} to contain: ${content}`
+        `Expected UML: ${uml} to contain: ${content}`,
      );
     });
-    }
+    },
   );
 });
diff --git a/src/test/uml_writer_test.ts b/src/test/uml_writer_test.ts
index 0ec120c..0da1243 100644
--- a/src/test/uml_writer_test.ts
+++ b/src/test/uml_writer_test.ts
@@ -15,7 +15,7 @@
  */
 
 import { assertEquals, assertExists } from "@std/assert";
-import { spy, assertSpyCalls } from "@std/testing/mock";
+import { assertSpyCalls, spy } from "@std/testing/mock";
 import * as fs from "node:fs";
 import * as path from "node:path";
 import {
@@ -65,12 +65,12 @@ const EXPECTED_DEFAULT_FORMAT = [
 
 const expectedFilePath = path.join(
   TEST_UNDECLARED_OUTPUTS_DIR,
-  `${OUTPUT_FILE_NAME}.json`
+  `${OUTPUT_FILE_NAME}.json`,
 );
 
 function getRuntimeConfig(
   diagramTool: DiagramTool = DiagramTool.PLANTUML,
-  mode: Mode = Mode.JSON
+  mode: Mode = Mode.JSON,
 ): RuntimeConfig {
   return {
     diagramTool,
@@ -110,7 +110,7 @@ Deno.test("UmlWriter", async (t) => {
         path: filePath,
         subject_type: "file",
         body: _body,
-      })
+      }),
     ),
     // Add the new methods
     getAllCommentsForPullRequest: spy(async () => {
@@ -137,7 +137,7 @@ Deno.test("UmlWriter", async (t) => {
 
     writer = new UmlWriter(
       FILE_PATH_TO_FLOW_DIFFERENCE,
-      mockGithubClient as unknown as GithubClient
+      mockGithubClient as unknown as GithubClient,
     );
 
     await writer.writeUmlDiagrams(); // Make this await the async operation
@@ -151,11 +151,11 @@ Deno.test("UmlWriter", async (t) => {
     // Verify that the methods were called the expected number of times
     assertSpyCalls(
       mockGithubClient.translateToComment,
-      FILE_PATH_TO_FLOW_DIFFERENCE.size
+      FILE_PATH_TO_FLOW_DIFFERENCE.size,
     );
     assertSpyCalls(
       mockGithubClient.writeComment,
-      FILE_PATH_TO_FLOW_DIFFERENCE.size
+      FILE_PATH_TO_FLOW_DIFFERENCE.size,
     );
 
     // Verify the content of the calls
diff --git a/src/test/xml_reader_test.ts b/src/test/xml_reader_test.ts
index 68427d9..cc929bb 100644
--- a/src/test/xml_reader_test.ts
+++ b/src/test/xml_reader_test.ts
@@ -24,7 +24,7 @@ const VALID_FILE_PATH = path.join(TEST_UNDECLARED_OUTPUTS_DIR, "test.xml");
 const INVALID_FILE_PATH = "nonexistent.xml";
 const INVALID_FILE_EXTENSION_PATH = path.join(
   TEST_UNDECLARED_OUTPUTS_DIR,
-  "test.txt"
+  "test.txt",
 );
 const XML_CONTENT = "";
 
@@ -35,13 +35,13 @@ Deno.test("XmlReader", async (t) => {
     async () => {
       await Deno.writeFile(
         VALID_FILE_PATH,
-        new TextEncoder().encode(XML_CONTENT)
+        new TextEncoder().encode(XML_CONTENT),
       );
       const xmlReader = new XmlReader(VALID_FILE_PATH);
       const xmlContent = xmlReader.getXmlFileBody();
       assertEquals(xmlContent, XML_CONTENT);
       await Deno.remove(VALID_FILE_PATH);
-    }
+    },
   );
 
   await t.step(
@@ -50,13 +50,13 @@ Deno.test("XmlReader", async (t) => {
      const xmlFilePath = path.join(TEST_UNDECLARED_OUTPUTS_DIR, "test.XmL");
       await Deno.writeFile(
         xmlFilePath,
-        new TextEncoder().encode(XML_CONTENT)
+        new TextEncoder().encode(XML_CONTENT),
       );
       const xmlReader = new XmlReader(xmlFilePath);
       const xmlContent = xmlReader.getXmlFileBody();
       assertEquals(xmlContent, XML_CONTENT);
       await Deno.remove(xmlFilePath);
-    }
+    },
   );
 
   await t.step(
@@ -66,9 +66,9 @@ Deno.test("XmlReader", async (t) => {
       const xmlReader = new XmlReader(INVALID_FILE_PATH);
       assertThrows(
         () => xmlReader.getXmlFileBody(),
         Error,
-        ERROR_MESSAGES.invalidFilePath(INVALID_FILE_PATH)
+        ERROR_MESSAGES.invalidFilePath(INVALID_FILE_PATH),
       );
-    }
+    },
   );
 
   await t.step(
@@ -76,16 +76,16 @@ Deno.test("XmlReader", async (t) => {
     async () => {
       await Deno.writeFile(
         INVALID_FILE_EXTENSION_PATH,
-        new TextEncoder().encode(XML_CONTENT)
+        new TextEncoder().encode(XML_CONTENT),
       );
       const xmlReader = new XmlReader(INVALID_FILE_EXTENSION_PATH);
       assertThrows(
         () => xmlReader.getXmlFileBody(),
         Error,
-        ERROR_MESSAGES.invalidFileExtension(INVALID_FILE_EXTENSION_PATH)
+        ERROR_MESSAGES.invalidFileExtension(INVALID_FILE_EXTENSION_PATH),
       );
       await Deno.remove(INVALID_FILE_EXTENSION_PATH);
-    }
+    },
   );
 });