From cfe2be23672702ca518052e916831fe16e24421b Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Thu, 3 Jul 2025 13:55:30 +0000 Subject: [PATCH 01/10] refactor: Adaption to codethink -Rename and sort the files according to Safety-Monitor example --- .../TA-BEHAVIOURS.rst | 0 .../TA-MISBEHAVIOURS.rst | 0 .../TA-TESTS.rst | 0 docs/trustable/report.rst | 18 +++++++++--------- .../{TT-CHANGES/index.rst => TT-CHANGES.rst} | 0 .../index.rst => TT-CONFIDENCE.rst} | 0 .../index.rst => TT-CONSTRUCTION.rst} | 2 +- .../index.rst => TT-EXPECTATIONS.rst} | 4 ++-- .../index.rst => TT-PROVENANCE.rst} | 0 .../{TT-RESULTS/index.rst => TT-RESULTS.rst} | 0 docs/trustable/tenets/index.rst | 12 ++++++------ 11 files changed, 18 insertions(+), 18 deletions(-) rename docs/trustable/{tenets/TT-EXPECTATIONS => assertions}/TA-BEHAVIOURS.rst (100%) rename docs/trustable/{tenets/TT-EXPECTATIONS => assertions}/TA-MISBEHAVIOURS.rst (100%) rename docs/trustable/{tenets/TT-CONSTRUCTION => assertions}/TA-TESTS.rst (100%) rename docs/trustable/tenets/{TT-CHANGES/index.rst => TT-CHANGES.rst} (100%) rename docs/trustable/tenets/{TT-CONFIDENCE/index.rst => TT-CONFIDENCE.rst} (100%) rename docs/trustable/tenets/{TT-CONSTRUCTION/index.rst => TT-CONSTRUCTION.rst} (96%) rename docs/trustable/tenets/{TT-EXPECTATIONS/index.rst => TT-EXPECTATIONS.rst} (91%) rename docs/trustable/tenets/{TT-PROVENANCE/index.rst => TT-PROVENANCE.rst} (100%) rename docs/trustable/tenets/{TT-RESULTS/index.rst => TT-RESULTS.rst} (100%) diff --git a/docs/trustable/tenets/TT-EXPECTATIONS/TA-BEHAVIOURS.rst b/docs/trustable/assertions/TA-BEHAVIOURS.rst similarity index 100% rename from docs/trustable/tenets/TT-EXPECTATIONS/TA-BEHAVIOURS.rst rename to docs/trustable/assertions/TA-BEHAVIOURS.rst diff --git a/docs/trustable/tenets/TT-EXPECTATIONS/TA-MISBEHAVIOURS.rst b/docs/trustable/assertions/TA-MISBEHAVIOURS.rst similarity index 100% rename from docs/trustable/tenets/TT-EXPECTATIONS/TA-MISBEHAVIOURS.rst rename to docs/trustable/assertions/TA-MISBEHAVIOURS.rst diff --git a/docs/trustable/tenets/TT-CONSTRUCTION/TA-TESTS.rst b/docs/trustable/assertions/TA-TESTS.rst similarity index 100% rename from docs/trustable/tenets/TT-CONSTRUCTION/TA-TESTS.rst rename to docs/trustable/assertions/TA-TESTS.rst diff --git a/docs/trustable/report.rst b/docs/trustable/report.rst index 09e8dab..f2e363e 100644 --- a/docs/trustable/report.rst +++ b/docs/trustable/report.rst @@ -32,7 +32,7 @@ Compliance for TA * - TA-ANALYSIS - Collected data from tests and monitoring of deployed software is analysed according to specified objectives. - 0.00 - * - :doc:`tenets/TT-EXPECTATIONS/TA-BEHAVIOURS` + * - :doc:`assertions/TA-BEHAVIOURS` - Expected or required behaviours for json library are identified, specified, verified and validated based on analysis. - 0.00 * - TA-CONFIDENCE @@ -59,7 +59,7 @@ Compliance for TA * - TA-METHODOLOGIES - Manual methodologies applied for json library by contributors, and their results, are managed according to specified objectives. - 0.00 - * - :doc:`tenets/TT-EXPECTATIONS/TA-MISBEHAVIOURS` + * - :doc:`assertions/TA-MISBEHAVIOURS` - Prohibited misbehaviours for json library are identified, and mitigations are specified, verified and validated based on analysis. - 0.00 * - TA-RELEASES @@ -68,7 +68,7 @@ Compliance for TA * - TA-SUPPLY_CHAIN - All sources for json library and tools are mirrored in our controlled environment. 
- 0.00 - * - :doc:`tenets/TT-CONSTRUCTION/TA-TESTS` + * - :doc:`assertions/TA-TESTS` - All tests for json library, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. - 0.00 * - TA-UPDATES @@ -102,21 +102,21 @@ Compliance for TT * - Item - Summary - Score - * - :doc:`tenets/TT-CHANGES/index` + * - :doc:`tenets/TT-CHANGES` - Json library is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. - 0.00 - * - :doc:`tenets/TT-CONFIDENCE/index` + * - :doc:`tenets/TT-CONFIDENCE` - Confidence in json library is measured by analysing actual performance in tests and in production. - 0.00 - * - :doc:`tenets/TT-CONSTRUCTION/index` + * - :doc:`tenets/TT-CONSTRUCTION` - Tools are provided to build json library from trusted sources (also provided) with full reproducibility. - 0.00 - * - :doc:`tenets/TT-EXPECTATIONS/index` + * - :doc:`tenets/TT-EXPECTATIONS` - Documentation is provided, specifying what json library is expected to do, and what it must not do, and how this is verified. - 0.00 - * - :doc:`tenets/TT-PROVENANCE/index` + * - :doc:`tenets/TT-PROVENANCE` - All inputs (and attestations for claims) for json library are provided with known provenance. - 0.00 - * - :doc:`tenets/TT-RESULTS/index` + * - :doc:`tenets/TT-RESULTS` - Evidence is provided to demonstrate that json library does what it is supposed to do, and does not do what it must not do. - 0.00 diff --git a/docs/trustable/tenets/TT-CHANGES/index.rst b/docs/trustable/tenets/TT-CHANGES.rst similarity index 100% rename from docs/trustable/tenets/TT-CHANGES/index.rst rename to docs/trustable/tenets/TT-CHANGES.rst diff --git a/docs/trustable/tenets/TT-CONFIDENCE/index.rst b/docs/trustable/tenets/TT-CONFIDENCE.rst similarity index 100% rename from docs/trustable/tenets/TT-CONFIDENCE/index.rst rename to docs/trustable/tenets/TT-CONFIDENCE.rst diff --git a/docs/trustable/tenets/TT-CONSTRUCTION/index.rst b/docs/trustable/tenets/TT-CONSTRUCTION.rst similarity index 96% rename from docs/trustable/tenets/TT-CONSTRUCTION/index.rst rename to docs/trustable/tenets/TT-CONSTRUCTION.rst index d7e7043..85a7f98 100644 --- a/docs/trustable/tenets/TT-CONSTRUCTION/index.rst +++ b/docs/trustable/tenets/TT-CONSTRUCTION.rst @@ -22,4 +22,4 @@ TT-CONSTRUCTION :caption: TT-CONSTRUCTION :glob: - TA-TESTS + ../assertions/TA-TESTS diff --git a/docs/trustable/tenets/TT-EXPECTATIONS/index.rst b/docs/trustable/tenets/TT-EXPECTATIONS.rst similarity index 91% rename from docs/trustable/tenets/TT-EXPECTATIONS/index.rst rename to docs/trustable/tenets/TT-EXPECTATIONS.rst index 5dad834..22152c9 100644 --- a/docs/trustable/tenets/TT-EXPECTATIONS/index.rst +++ b/docs/trustable/tenets/TT-EXPECTATIONS.rst @@ -22,5 +22,5 @@ TT-EXPECTATIONS :caption: TT-EXPECTATIONS :glob: - TA-BEHAVIOURS - TA-MISBEHAVIOURS + ../assertions/TA-BEHAVIOURS + ../assertions/TA-MISBEHAVIOURS diff --git a/docs/trustable/tenets/TT-PROVENANCE/index.rst b/docs/trustable/tenets/TT-PROVENANCE.rst similarity index 100% rename from docs/trustable/tenets/TT-PROVENANCE/index.rst rename to docs/trustable/tenets/TT-PROVENANCE.rst diff --git a/docs/trustable/tenets/TT-RESULTS/index.rst b/docs/trustable/tenets/TT-RESULTS.rst similarity index 100% rename from docs/trustable/tenets/TT-RESULTS/index.rst rename to docs/trustable/tenets/TT-RESULTS.rst diff --git a/docs/trustable/tenets/index.rst b/docs/trustable/tenets/index.rst index 9a58c5b..c63d486 100644 --- 
a/docs/trustable/tenets/index.rst +++ b/docs/trustable/tenets/index.rst @@ -22,9 +22,9 @@ Trustable Tenets :caption: Trustable Tenets :glob: - TT-PROVENANCE/index - TT-CONSTRUCTION/index - TT-CHANGES/index - TT-EXPECTATIONS/index - TT-RESULTS/index - TT-CONFIDENCE/index + TT-PROVENANCE + TT-CONSTRUCTION + TT-CHANGES + TT-EXPECTATIONS + TT-RESULTS + TT-CONFIDENCE From 3676b5e8f4d5ce9f8ccf01f0260a3985f9ec1923 Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Thu, 3 Jul 2025 13:56:41 +0000 Subject: [PATCH 02/10] setup: Neglect build folder in gitignore -Add build folder in gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 08cd7a3..15bb129 100644 --- a/.gitignore +++ b/.gitignore @@ -42,6 +42,7 @@ user.bazelrc # docs:incremental and docs:ide_support build artifacts /_build +/build # Vale - editorial style guide .vale.ini From 4ea2ba36c46cefaff9da0e25a4e83de79c9abfb2 Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Thu, 3 Jul 2025 13:57:35 +0000 Subject: [PATCH 03/10] trudag: Add trudag in Dockerfile -Add trudag in Dockerfile for TSF application --- .devcontainer/S-CORE/Dockerfile | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.devcontainer/S-CORE/Dockerfile b/.devcontainer/S-CORE/Dockerfile index 5b8590c..20eefef 100644 --- a/.devcontainer/S-CORE/Dockerfile +++ b/.devcontainer/S-CORE/Dockerfile @@ -32,7 +32,7 @@ RUN apt-get update && \ python3 \ python3-pip \ python3-venv \ - # pipx \ + pipx \ locales \ ssh-client \ && apt-get clean && \ @@ -85,3 +85,7 @@ RUN groupadd --gid $USER_GID $USERNAME \ # Default user USER $USERNAME + +# Install trudag using pipx +RUN pipx install trustable --index-url https://gitlab.com/api/v4/projects/66600816/packages/pypi/simple && \ + pipx ensurepath From 5436fcbb2fa30a47cbde697d693848a5321160bb Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Thu, 3 Jul 2025 13:58:21 +0000 Subject: [PATCH 04/10] setup: Initial trudag setup -Add .dotstop.dot file for digraph --- .dotstop.dot | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .dotstop.dot diff --git a/.dotstop.dot b/.dotstop.dot new file mode 100644 index 0000000..b62953b --- /dev/null +++ b/.dotstop.dot @@ -0,0 +1,6 @@ +# This file is automatically generated by dotstop and should not be edited manually. +# Generated using trustable 2025.6.25. 
+ +digraph G{ + +} From b196b51fed07efad46caf2013a819c4f1132bedb Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Thu, 3 Jul 2025 14:42:47 +0000 Subject: [PATCH 05/10] setup: Add tenets and assertions to trudag -Add 16 assertions and 6 tenets to trudag -Update .dotstop.dot -Add links between corresponding tenets and assertions --- .dotstop.dot | 48 ++++++++++++++++++- docs/trustable/TRUSTABLE-SOFTWARE.md | 5 ++ docs/trustable/assertions/TA-ANALYSIS.md | 5 ++ docs/trustable/assertions/TA-BEHAVIOURS.md | 5 ++ docs/trustable/assertions/TA-CONFIDENCE.md | 5 ++ docs/trustable/assertions/TA-CONSTRAINTS.md | 5 ++ docs/trustable/assertions/TA-DATA.md | 5 ++ docs/trustable/assertions/TA-FIXES.md | 5 ++ docs/trustable/assertions/TA-INDICATORS.md | 5 ++ docs/trustable/assertions/TA-INPUTS.md | 5 ++ docs/trustable/assertions/TA-ITERATIONS.md | 5 ++ docs/trustable/assertions/TA-METHODOLOGIES.md | 5 ++ docs/trustable/assertions/TA-MISBEHAVIOURS.md | 5 ++ docs/trustable/assertions/TA-RELEASES.md | 5 ++ docs/trustable/assertions/TA-SUPPLY_CHAIN.md | 5 ++ docs/trustable/assertions/TA-TESTS.md | 5 ++ docs/trustable/assertions/TA-UPDATES.md | 5 ++ docs/trustable/assertions/TA-VALIDATION.md | 5 ++ docs/trustable/tenets/TT-CHANGES.md | 5 ++ docs/trustable/tenets/TT-CONFIDENCE.md | 5 ++ docs/trustable/tenets/TT-CONSTRUCTION.md | 5 ++ docs/trustable/tenets/TT-EXPECTATIONS.md | 5 ++ docs/trustable/tenets/TT-PROVENANCE.md | 5 ++ docs/trustable/tenets/TT-RESULTS.md | 5 ++ 24 files changed, 161 insertions(+), 2 deletions(-) create mode 100644 docs/trustable/TRUSTABLE-SOFTWARE.md create mode 100644 docs/trustable/assertions/TA-ANALYSIS.md create mode 100644 docs/trustable/assertions/TA-BEHAVIOURS.md create mode 100644 docs/trustable/assertions/TA-CONFIDENCE.md create mode 100644 docs/trustable/assertions/TA-CONSTRAINTS.md create mode 100644 docs/trustable/assertions/TA-DATA.md create mode 100644 docs/trustable/assertions/TA-FIXES.md create mode 100644 docs/trustable/assertions/TA-INDICATORS.md create mode 100644 docs/trustable/assertions/TA-INPUTS.md create mode 100644 docs/trustable/assertions/TA-ITERATIONS.md create mode 100644 docs/trustable/assertions/TA-METHODOLOGIES.md create mode 100644 docs/trustable/assertions/TA-MISBEHAVIOURS.md create mode 100644 docs/trustable/assertions/TA-RELEASES.md create mode 100644 docs/trustable/assertions/TA-SUPPLY_CHAIN.md create mode 100644 docs/trustable/assertions/TA-TESTS.md create mode 100644 docs/trustable/assertions/TA-UPDATES.md create mode 100644 docs/trustable/assertions/TA-VALIDATION.md create mode 100644 docs/trustable/tenets/TT-CHANGES.md create mode 100644 docs/trustable/tenets/TT-CONFIDENCE.md create mode 100644 docs/trustable/tenets/TT-CONSTRUCTION.md create mode 100644 docs/trustable/tenets/TT-EXPECTATIONS.md create mode 100644 docs/trustable/tenets/TT-PROVENANCE.md create mode 100644 docs/trustable/tenets/TT-RESULTS.md diff --git a/.dotstop.dot b/.dotstop.dot index b62953b..e9d6778 100644 --- a/.dotstop.dot +++ b/.dotstop.dot @@ -1,6 +1,50 @@ # This file is automatically generated by dotstop and should not be edited manually. # Generated using trustable 2025.6.25. 
-digraph G{ - +digraph G { +"TT-CHANGES" [sha=b86f778a375ffce19e860dbb0a896b49b43a6da679d6a5dc959dfeab9ac3b9af]; +"TA-ANALYSIS" [sha="4eff9d0fd4e4abef6b94a0bcd942a963cef8acbba70f87df701f464ef881c952"]; +"TA-BEHAVIOURS" [sha="2a489bdfa94ff7ddfdc93f8b984489482d9f5c81298d6c1184f01498c2119850"]; +"TA-CONFIDENCE" [sha=ab14eaafcab5cf364c172b8e3cab184b836c51bcf48e114424adcd371a00d9af]; +"TA-CONSTRAINTS" [sha=f31363168e413655641cddf5e4b9cc21a149045a1195a46bf8e49cedaaf8748d]; +"TA-DATA" [sha="6534491ae483042ee384e986d82c60f436f4ba388821fc0ee86d730a0afc19d7"]; +"TA-FIXES" [sha="49ff8de3c307b3763b3face96ce7463bfe7e99e1026cbe7a9d400370611984a2"]; +"TA-INDICATORS" [sha="4aea77126df3f0a5418a42bd05ebb091b2327e84b7e7c95e47a328fa1a88dde3"]; +"TA-INPUTS" [sha=ac007b4c1ae66883418b0be24fe4333542bc8e1a975735381012436dcaf2c249]; +"TA-ITERATIONS" [sha="7ddd327b59d76ca45bb82ff7546f125a969c867e3cff75ee8b6b704f2235272a"]; +"TA-METHODOLOGIES" [sha="053404ce9545ac4bd79c612e04aeb9418e43de3f574abc38e1a778c62f0bad67"]; +"TA-MISBEHAVIOURS" [sha=ae77f986a91eb1c24d3f028c833462f8096034642853126f2d164675728dd006]; +"TA-RELEASES" [sha="9ac94ebec94930f3f7694d15a63af51777ef6815a87417ec568eea730565db39"]; +"TA-SUPPLY_CHAIN" [sha="730236cb2456c6c05d3fb489651218690322d630b28a2d356e07ca8c302770f0"]; +"TA-TESTS" [sha="8229fc45081028193dcc001aeefb9288a546fc4b2496399364e09f116d50066d"]; +"TA-UPDATES" [sha="49d47e7f839cf74b8215700b4ae18b1e101e31529396b73868b5a0c07c4ac5a5"]; +"TA-VALIDATION" [sha="7a0ccbf1bc68cc03baa8a90e59e59faf7f8b4be8c569c30c91ce9dfef411f1ba"]; +"TRUSTABLE-SOFTWARE" [sha="6fe18b60f7cda4d8bce8fb2ad71ef418526b102b002704a8da7fe91375e83c49"]; +"TT-CONFIDENCE" [sha="9063ad819cf6db64f43b810beff5bc803ea5d1970f9d0765e7e4363a4c2ef33b"]; +"TT-CONSTRUCTION" [sha=e01ed0169dbe982735c5f365b9071114b621d2c7a0bf4580eb5937bf4c1be552]; +"TT-EXPECTATIONS" [sha="90cb78f0d0845b1d5c14680208a8841684123af24e14a356bc86db401bf53421"]; +"TT-PROVENANCE" [sha=ac9a0bacb8e315b9fc868a1e409953d85947cbc687686f8dcd8e276e469010a2]; +"TT-RESULTS" [sha="46122c3d2ea775a3bd95eb5d32c26bb449fbf57aa4ed7ad8216902c1d0c7ef37"]; +"TT-CHANGES" -> "TA-FIXES" [sha="2caf1fb5bcb2594647c5e64171d1d00ff8903de9af36fc0c85455ff8a159038f"]; +"TT-CHANGES" -> "TA-UPDATES" [sha="2b382d14da224d6af92c46182816cbef3162c0f0961a6cd75e91357ff1ab5e4b"]; +"TRUSTABLE-SOFTWARE" -> "TT-CHANGES" [sha="1049d16da8e7a07697b683bb7481ba3307f155f8a344b9086675879f7df48ea8"]; +"TRUSTABLE-SOFTWARE" -> "TT-CONFIDENCE" [sha=deec827f04e4f5e7bed399fbf2efdc7833625add4c255af75f1c19f5664c1419]; +"TRUSTABLE-SOFTWARE" -> "TT-CONSTRUCTION" [sha="4877307b12711eda98582474933a6d1f941175719b47bd045bafa93468062cb2"]; +"TRUSTABLE-SOFTWARE" -> "TT-EXPECTATIONS" [sha="35c7a3624d334501b1daff3bf0c5c860a3c71cd473fe86032a8fbb5c5f09b576"]; +"TRUSTABLE-SOFTWARE" -> "TT-PROVENANCE" [sha="906904794eba998e0802a449d1e791c10c7b3bdf72e0e82ff4e8cc266e0948e6"]; +"TRUSTABLE-SOFTWARE" -> "TT-RESULTS" [sha=b0b646e05f8b731359964efbf26652758c8b2587a1d7d356487c16a09291acda]; +"TT-CONFIDENCE" -> "TA-METHODOLOGIES" [sha=c420b617d475c3a3683455f064bdb27058700074a64009092908dd9d26929ff7]; +"TT-CONFIDENCE" -> "TA-CONFIDENCE" [sha=bf2f3e0b81f975ca6bf328d8131cb30edff79b8a5d33c2355c3c7af9c78953b7]; +"TT-CONSTRUCTION" -> "TA-RELEASES" [sha="9521810dc7419518fc762c7dfae15a5a69be9e7a50839c91459dc9273ea0198b"]; +"TT-CONSTRUCTION" -> "TA-TESTS" [sha="258ae56df47aa37761db527e3ecb526bc14e6b48b4d43ad08cf9552d3b4fe891"]; +"TT-CONSTRUCTION" -> "TA-ITERATIONS" [sha="0f5578363e7296147ecbd68b4ff9e8bf513a27f4da1d8b192449e8461e9f0cc9"]; +"TT-EXPECTATIONS" -> 
"TA-BEHAVIOURS" [sha="597a67c65e2683915363600d73417428719e19f0312c4b44c329448010299f5c"]; +"TT-EXPECTATIONS" -> "TA-MISBEHAVIOURS" [sha="75b02ee381cec191a7f206cfbc3cf40c718013dfa4ba401c7593c44d6e2968c5"]; +"TT-EXPECTATIONS" -> "TA-CONSTRAINTS" [sha="595bd8c1fc2c42717fd70ca007d1241c006cffcafb9d01f71471dba1a100fa5a"]; +"TT-EXPECTATIONS" -> "TA-INDICATORS" [sha="9d53276d74055f7c1083610e3404e613e9eb51a34d7db9abbed8d9cf9386e149"]; +"TT-PROVENANCE" -> "TA-SUPPLY_CHAIN" [sha="13b86151c3543009b1302f77c595d1e9cd0c56f3425d5a6671d7ecc73b7ebfe1"]; +"TT-PROVENANCE" -> "TA-INPUTS" [sha="07fde7fefc28d94af98bfa4cad9d7978bb598ad4fc1be46dfe5d42bb999e730f"]; +"TT-RESULTS" -> "TA-DATA" [sha="0ea300c6c476e6327e00a15bceffe3ffad022deaf25c46828caa4370ace4a011"]; +"TT-RESULTS" -> "TA-ANALYSIS" [sha="4287aba2b161b9850d8ecbf3bd04406082098de78f57b24200dca9c51d2bee96"]; +"TT-RESULTS" -> "TA-VALIDATION" [sha=e384a259589a5e8745ec6fe6b91e1dff4394bf19460f1795f54f82277f918be2]; } diff --git a/docs/trustable/TRUSTABLE-SOFTWARE.md b/docs/trustable/TRUSTABLE-SOFTWARE.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/TRUSTABLE-SOFTWARE.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-ANALYSIS.md b/docs/trustable/assertions/TA-ANALYSIS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-ANALYSIS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-BEHAVIOURS.md b/docs/trustable/assertions/TA-BEHAVIOURS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-BEHAVIOURS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-CONFIDENCE.md b/docs/trustable/assertions/TA-CONFIDENCE.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-CONFIDENCE.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-CONSTRAINTS.md b/docs/trustable/assertions/TA-CONSTRAINTS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-CONSTRAINTS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-DATA.md b/docs/trustable/assertions/TA-DATA.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-DATA.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-FIXES.md b/docs/trustable/assertions/TA-FIXES.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-FIXES.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-INDICATORS.md b/docs/trustable/assertions/TA-INDICATORS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-INDICATORS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-INPUTS.md b/docs/trustable/assertions/TA-INPUTS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-INPUTS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-ITERATIONS.md b/docs/trustable/assertions/TA-ITERATIONS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-ITERATIONS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff 
--git a/docs/trustable/assertions/TA-METHODOLOGIES.md b/docs/trustable/assertions/TA-METHODOLOGIES.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-METHODOLOGIES.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-MISBEHAVIOURS.md b/docs/trustable/assertions/TA-MISBEHAVIOURS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-MISBEHAVIOURS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-RELEASES.md b/docs/trustable/assertions/TA-RELEASES.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-RELEASES.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-SUPPLY_CHAIN.md b/docs/trustable/assertions/TA-SUPPLY_CHAIN.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-SUPPLY_CHAIN.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-TESTS.md b/docs/trustable/assertions/TA-TESTS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-TESTS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-UPDATES.md b/docs/trustable/assertions/TA-UPDATES.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-UPDATES.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/assertions/TA-VALIDATION.md b/docs/trustable/assertions/TA-VALIDATION.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/assertions/TA-VALIDATION.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/tenets/TT-CHANGES.md b/docs/trustable/tenets/TT-CHANGES.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/tenets/TT-CHANGES.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/tenets/TT-CONFIDENCE.md b/docs/trustable/tenets/TT-CONFIDENCE.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/tenets/TT-CONFIDENCE.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/tenets/TT-CONSTRUCTION.md b/docs/trustable/tenets/TT-CONSTRUCTION.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/tenets/TT-CONSTRUCTION.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/tenets/TT-EXPECTATIONS.md b/docs/trustable/tenets/TT-EXPECTATIONS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/tenets/TT-EXPECTATIONS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/tenets/TT-PROVENANCE.md b/docs/trustable/tenets/TT-PROVENANCE.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/tenets/TT-PROVENANCE.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + diff --git a/docs/trustable/tenets/TT-RESULTS.md b/docs/trustable/tenets/TT-RESULTS.md new file mode 100644 index 0000000..8573342 --- /dev/null +++ b/docs/trustable/tenets/TT-RESULTS.md @@ -0,0 +1,5 @@ +--- +level: 1.1 +normative: true +--- + From 95b05d8b359b07caa59ce0a46984822400e4fd09 Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Thu, 3 Jul 2025 15:15:38 +0000 Subject: [PATCH 06/10] setup: Add initial statement -Add initial 
statement for each TA, TT and Trustable Software --- docs/trustable/TRUSTABLE-SOFTWARE.md | 1 + docs/trustable/assertions/TA-ANALYSIS.md | 1 + docs/trustable/assertions/TA-BEHAVIOURS.md | 1 + docs/trustable/assertions/TA-CONFIDENCE.md | 1 + docs/trustable/assertions/TA-CONSTRAINTS.md | 1 + docs/trustable/assertions/TA-DATA.md | 1 + docs/trustable/assertions/TA-FIXES.md | 1 + docs/trustable/assertions/TA-INDICATORS.md | 1 + docs/trustable/assertions/TA-INPUTS.md | 1 + docs/trustable/assertions/TA-ITERATIONS.md | 1 + docs/trustable/assertions/TA-METHODOLOGIES.md | 1 + docs/trustable/assertions/TA-MISBEHAVIOURS.md | 1 + docs/trustable/assertions/TA-RELEASES.md | 1 + docs/trustable/assertions/TA-SUPPLY_CHAIN.md | 1 + docs/trustable/assertions/TA-TESTS.md | 1 + docs/trustable/assertions/TA-UPDATES.md | 1 + docs/trustable/assertions/TA-VALIDATION.md | 1 + docs/trustable/tenets/TT-CHANGES.md | 1 + docs/trustable/tenets/TT-CONFIDENCE.md | 1 + docs/trustable/tenets/TT-CONSTRUCTION.md | 1 + docs/trustable/tenets/TT-EXPECTATIONS.md | 1 + docs/trustable/tenets/TT-PROVENANCE.md | 1 + docs/trustable/tenets/TT-RESULTS.md | 1 + 23 files changed, 23 insertions(+) diff --git a/docs/trustable/TRUSTABLE-SOFTWARE.md b/docs/trustable/TRUSTABLE-SOFTWARE.md index 8573342..140c030 100644 --- a/docs/trustable/TRUSTABLE-SOFTWARE.md +++ b/docs/trustable/TRUSTABLE-SOFTWARE.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +This release of XYZ is Trustable. diff --git a/docs/trustable/assertions/TA-ANALYSIS.md b/docs/trustable/assertions/TA-ANALYSIS.md index 8573342..ce74c5c 100644 --- a/docs/trustable/assertions/TA-ANALYSIS.md +++ b/docs/trustable/assertions/TA-ANALYSIS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Collected data from tests and monitoring of deployed software is analysed according to specified objectives. diff --git a/docs/trustable/assertions/TA-BEHAVIOURS.md b/docs/trustable/assertions/TA-BEHAVIOURS.md index 8573342..43881e2 100644 --- a/docs/trustable/assertions/TA-BEHAVIOURS.md +++ b/docs/trustable/assertions/TA-BEHAVIOURS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Expected or required behaviours for XYZ are identified, specified, verified and validated based on analysis. diff --git a/docs/trustable/assertions/TA-CONFIDENCE.md b/docs/trustable/assertions/TA-CONFIDENCE.md index 8573342..93a0d02 100644 --- a/docs/trustable/assertions/TA-CONFIDENCE.md +++ b/docs/trustable/assertions/TA-CONFIDENCE.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Confidence in XYZ is measured based on results of analysis. diff --git a/docs/trustable/assertions/TA-CONSTRAINTS.md b/docs/trustable/assertions/TA-CONSTRAINTS.md index 8573342..72ba26d 100644 --- a/docs/trustable/assertions/TA-CONSTRAINTS.md +++ b/docs/trustable/assertions/TA-CONSTRAINTS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Constraints on adaptation and deployment of XYZ are specified. diff --git a/docs/trustable/assertions/TA-DATA.md b/docs/trustable/assertions/TA-DATA.md index 8573342..19dda37 100644 --- a/docs/trustable/assertions/TA-DATA.md +++ b/docs/trustable/assertions/TA-DATA.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Data is collected from tests, and from monitoring of deployed software, according to specified objectives. 
diff --git a/docs/trustable/assertions/TA-FIXES.md b/docs/trustable/assertions/TA-FIXES.md index 8573342..353fa45 100644 --- a/docs/trustable/assertions/TA-FIXES.md +++ b/docs/trustable/assertions/TA-FIXES.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Known bugs or misbehaviours are analysed and triaged, and critical fixes or mitigations are implemented or applied. diff --git a/docs/trustable/assertions/TA-INDICATORS.md b/docs/trustable/assertions/TA-INDICATORS.md index 8573342..266d567 100644 --- a/docs/trustable/assertions/TA-INDICATORS.md +++ b/docs/trustable/assertions/TA-INDICATORS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Advance warning indicators for misbehaviours are identified, and monitoring mechanisms are specified, verified and validated based on analysis. diff --git a/docs/trustable/assertions/TA-INPUTS.md b/docs/trustable/assertions/TA-INPUTS.md index 8573342..f9692f6 100644 --- a/docs/trustable/assertions/TA-INPUTS.md +++ b/docs/trustable/assertions/TA-INPUTS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +All inputs to XYZ are assessed, to identify potential risks and issues. diff --git a/docs/trustable/assertions/TA-ITERATIONS.md b/docs/trustable/assertions/TA-ITERATIONS.md index 8573342..1a9847c 100644 --- a/docs/trustable/assertions/TA-ITERATIONS.md +++ b/docs/trustable/assertions/TA-ITERATIONS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +All constructed iterations of XYZ include source code, build instructions, tests, results and attestations. diff --git a/docs/trustable/assertions/TA-METHODOLOGIES.md b/docs/trustable/assertions/TA-METHODOLOGIES.md index 8573342..8523d30 100644 --- a/docs/trustable/assertions/TA-METHODOLOGIES.md +++ b/docs/trustable/assertions/TA-METHODOLOGIES.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Manual methodologies applied for XYZ by contributors, and their results, are managed according to specified objectives. diff --git a/docs/trustable/assertions/TA-MISBEHAVIOURS.md b/docs/trustable/assertions/TA-MISBEHAVIOURS.md index 8573342..99aaebd 100644 --- a/docs/trustable/assertions/TA-MISBEHAVIOURS.md +++ b/docs/trustable/assertions/TA-MISBEHAVIOURS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Prohibited misbehaviours for XYZ are identified, and mitigations are specified, verified and validated based on analysis. diff --git a/docs/trustable/assertions/TA-RELEASES.md b/docs/trustable/assertions/TA-RELEASES.md index 8573342..00d939e 100644 --- a/docs/trustable/assertions/TA-RELEASES.md +++ b/docs/trustable/assertions/TA-RELEASES.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Construction of XYZ releases is fully repeatable and the results are fully reproducible, with any exceptions documented and justified. diff --git a/docs/trustable/assertions/TA-SUPPLY_CHAIN.md b/docs/trustable/assertions/TA-SUPPLY_CHAIN.md index 8573342..bc3c297 100644 --- a/docs/trustable/assertions/TA-SUPPLY_CHAIN.md +++ b/docs/trustable/assertions/TA-SUPPLY_CHAIN.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +All sources for XYZ and tools are mirrored in our controlled environment. diff --git a/docs/trustable/assertions/TA-TESTS.md b/docs/trustable/assertions/TA-TESTS.md index 8573342..7cd1e24 100644 --- a/docs/trustable/assertions/TA-TESTS.md +++ b/docs/trustable/assertions/TA-TESTS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +All tests for XYZ, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. 
diff --git a/docs/trustable/assertions/TA-UPDATES.md b/docs/trustable/assertions/TA-UPDATES.md index 8573342..f7d008d 100644 --- a/docs/trustable/assertions/TA-UPDATES.md +++ b/docs/trustable/assertions/TA-UPDATES.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +XYZ components, configurations and tools are updated under specified change and configuration management controls. diff --git a/docs/trustable/assertions/TA-VALIDATION.md b/docs/trustable/assertions/TA-VALIDATION.md index 8573342..08cba34 100644 --- a/docs/trustable/assertions/TA-VALIDATION.md +++ b/docs/trustable/assertions/TA-VALIDATION.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +All specified tests are executed repeatedly, under defined conditions in controlled environments, according to specified objectives. diff --git a/docs/trustable/tenets/TT-CHANGES.md b/docs/trustable/tenets/TT-CHANGES.md index 8573342..2234874 100644 --- a/docs/trustable/tenets/TT-CHANGES.md +++ b/docs/trustable/tenets/TT-CHANGES.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +XYZ is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. diff --git a/docs/trustable/tenets/TT-CONFIDENCE.md b/docs/trustable/tenets/TT-CONFIDENCE.md index 8573342..925fdaa 100644 --- a/docs/trustable/tenets/TT-CONFIDENCE.md +++ b/docs/trustable/tenets/TT-CONFIDENCE.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Confidence in XYZ is achieved by measuring and analysing behaviour and evidence over time. diff --git a/docs/trustable/tenets/TT-CONSTRUCTION.md b/docs/trustable/tenets/TT-CONSTRUCTION.md index 8573342..1d598c4 100644 --- a/docs/trustable/tenets/TT-CONSTRUCTION.md +++ b/docs/trustable/tenets/TT-CONSTRUCTION.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Tools are provided to build XYZ from trusted sources (also provided) with full reproducibility. diff --git a/docs/trustable/tenets/TT-EXPECTATIONS.md b/docs/trustable/tenets/TT-EXPECTATIONS.md index 8573342..16e67bc 100644 --- a/docs/trustable/tenets/TT-EXPECTATIONS.md +++ b/docs/trustable/tenets/TT-EXPECTATIONS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Documentation is provided, specifying what XYZ is expected to do, and what it must not do, and how this is verified. diff --git a/docs/trustable/tenets/TT-PROVENANCE.md b/docs/trustable/tenets/TT-PROVENANCE.md index 8573342..d7bb8c0 100644 --- a/docs/trustable/tenets/TT-PROVENANCE.md +++ b/docs/trustable/tenets/TT-PROVENANCE.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +All inputs (and attestations for claims) for XYZ are provided with known provenance. diff --git a/docs/trustable/tenets/TT-RESULTS.md b/docs/trustable/tenets/TT-RESULTS.md index 8573342..fe67d03 100644 --- a/docs/trustable/tenets/TT-RESULTS.md +++ b/docs/trustable/tenets/TT-RESULTS.md @@ -3,3 +3,4 @@ level: 1.1 normative: true --- +Evidence is provided to demonstrate that XYZ does what it is supposed to do, and does not do what it must not do. From 125651a843194dd73b783ad5b7520ded24c716a8 Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Thu, 3 Jul 2025 15:28:30 +0000 Subject: [PATCH 07/10] update: Review items and links -Review items and links by trudag manage set-item/link ... --- .dotstop.dot | 90 ++++++++++++++++++++++++++-------------------------- 1 file changed, 45 insertions(+), 45 deletions(-) diff --git a/.dotstop.dot b/.dotstop.dot index e9d6778..637e0da 100644 --- a/.dotstop.dot +++ b/.dotstop.dot @@ -2,49 +2,49 @@ # Generated using trustable 2025.6.25. 
digraph G { -"TT-CHANGES" [sha=b86f778a375ffce19e860dbb0a896b49b43a6da679d6a5dc959dfeab9ac3b9af]; -"TA-ANALYSIS" [sha="4eff9d0fd4e4abef6b94a0bcd942a963cef8acbba70f87df701f464ef881c952"]; -"TA-BEHAVIOURS" [sha="2a489bdfa94ff7ddfdc93f8b984489482d9f5c81298d6c1184f01498c2119850"]; -"TA-CONFIDENCE" [sha=ab14eaafcab5cf364c172b8e3cab184b836c51bcf48e114424adcd371a00d9af]; -"TA-CONSTRAINTS" [sha=f31363168e413655641cddf5e4b9cc21a149045a1195a46bf8e49cedaaf8748d]; -"TA-DATA" [sha="6534491ae483042ee384e986d82c60f436f4ba388821fc0ee86d730a0afc19d7"]; -"TA-FIXES" [sha="49ff8de3c307b3763b3face96ce7463bfe7e99e1026cbe7a9d400370611984a2"]; -"TA-INDICATORS" [sha="4aea77126df3f0a5418a42bd05ebb091b2327e84b7e7c95e47a328fa1a88dde3"]; -"TA-INPUTS" [sha=ac007b4c1ae66883418b0be24fe4333542bc8e1a975735381012436dcaf2c249]; -"TA-ITERATIONS" [sha="7ddd327b59d76ca45bb82ff7546f125a969c867e3cff75ee8b6b704f2235272a"]; -"TA-METHODOLOGIES" [sha="053404ce9545ac4bd79c612e04aeb9418e43de3f574abc38e1a778c62f0bad67"]; -"TA-MISBEHAVIOURS" [sha=ae77f986a91eb1c24d3f028c833462f8096034642853126f2d164675728dd006]; -"TA-RELEASES" [sha="9ac94ebec94930f3f7694d15a63af51777ef6815a87417ec568eea730565db39"]; -"TA-SUPPLY_CHAIN" [sha="730236cb2456c6c05d3fb489651218690322d630b28a2d356e07ca8c302770f0"]; -"TA-TESTS" [sha="8229fc45081028193dcc001aeefb9288a546fc4b2496399364e09f116d50066d"]; -"TA-UPDATES" [sha="49d47e7f839cf74b8215700b4ae18b1e101e31529396b73868b5a0c07c4ac5a5"]; -"TA-VALIDATION" [sha="7a0ccbf1bc68cc03baa8a90e59e59faf7f8b4be8c569c30c91ce9dfef411f1ba"]; -"TRUSTABLE-SOFTWARE" [sha="6fe18b60f7cda4d8bce8fb2ad71ef418526b102b002704a8da7fe91375e83c49"]; -"TT-CONFIDENCE" [sha="9063ad819cf6db64f43b810beff5bc803ea5d1970f9d0765e7e4363a4c2ef33b"]; -"TT-CONSTRUCTION" [sha=e01ed0169dbe982735c5f365b9071114b621d2c7a0bf4580eb5937bf4c1be552]; -"TT-EXPECTATIONS" [sha="90cb78f0d0845b1d5c14680208a8841684123af24e14a356bc86db401bf53421"]; -"TT-PROVENANCE" [sha=ac9a0bacb8e315b9fc868a1e409953d85947cbc687686f8dcd8e276e469010a2]; -"TT-RESULTS" [sha="46122c3d2ea775a3bd95eb5d32c26bb449fbf57aa4ed7ad8216902c1d0c7ef37"]; -"TT-CHANGES" -> "TA-FIXES" [sha="2caf1fb5bcb2594647c5e64171d1d00ff8903de9af36fc0c85455ff8a159038f"]; -"TT-CHANGES" -> "TA-UPDATES" [sha="2b382d14da224d6af92c46182816cbef3162c0f0961a6cd75e91357ff1ab5e4b"]; -"TRUSTABLE-SOFTWARE" -> "TT-CHANGES" [sha="1049d16da8e7a07697b683bb7481ba3307f155f8a344b9086675879f7df48ea8"]; -"TRUSTABLE-SOFTWARE" -> "TT-CONFIDENCE" [sha=deec827f04e4f5e7bed399fbf2efdc7833625add4c255af75f1c19f5664c1419]; -"TRUSTABLE-SOFTWARE" -> "TT-CONSTRUCTION" [sha="4877307b12711eda98582474933a6d1f941175719b47bd045bafa93468062cb2"]; -"TRUSTABLE-SOFTWARE" -> "TT-EXPECTATIONS" [sha="35c7a3624d334501b1daff3bf0c5c860a3c71cd473fe86032a8fbb5c5f09b576"]; -"TRUSTABLE-SOFTWARE" -> "TT-PROVENANCE" [sha="906904794eba998e0802a449d1e791c10c7b3bdf72e0e82ff4e8cc266e0948e6"]; -"TRUSTABLE-SOFTWARE" -> "TT-RESULTS" [sha=b0b646e05f8b731359964efbf26652758c8b2587a1d7d356487c16a09291acda]; -"TT-CONFIDENCE" -> "TA-METHODOLOGIES" [sha=c420b617d475c3a3683455f064bdb27058700074a64009092908dd9d26929ff7]; -"TT-CONFIDENCE" -> "TA-CONFIDENCE" [sha=bf2f3e0b81f975ca6bf328d8131cb30edff79b8a5d33c2355c3c7af9c78953b7]; -"TT-CONSTRUCTION" -> "TA-RELEASES" [sha="9521810dc7419518fc762c7dfae15a5a69be9e7a50839c91459dc9273ea0198b"]; -"TT-CONSTRUCTION" -> "TA-TESTS" [sha="258ae56df47aa37761db527e3ecb526bc14e6b48b4d43ad08cf9552d3b4fe891"]; -"TT-CONSTRUCTION" -> "TA-ITERATIONS" [sha="0f5578363e7296147ecbd68b4ff9e8bf513a27f4da1d8b192449e8461e9f0cc9"]; -"TT-EXPECTATIONS" -> "TA-BEHAVIOURS" 
[sha="597a67c65e2683915363600d73417428719e19f0312c4b44c329448010299f5c"]; -"TT-EXPECTATIONS" -> "TA-MISBEHAVIOURS" [sha="75b02ee381cec191a7f206cfbc3cf40c718013dfa4ba401c7593c44d6e2968c5"]; -"TT-EXPECTATIONS" -> "TA-CONSTRAINTS" [sha="595bd8c1fc2c42717fd70ca007d1241c006cffcafb9d01f71471dba1a100fa5a"]; -"TT-EXPECTATIONS" -> "TA-INDICATORS" [sha="9d53276d74055f7c1083610e3404e613e9eb51a34d7db9abbed8d9cf9386e149"]; -"TT-PROVENANCE" -> "TA-SUPPLY_CHAIN" [sha="13b86151c3543009b1302f77c595d1e9cd0c56f3425d5a6671d7ecc73b7ebfe1"]; -"TT-PROVENANCE" -> "TA-INPUTS" [sha="07fde7fefc28d94af98bfa4cad9d7978bb598ad4fc1be46dfe5d42bb999e730f"]; -"TT-RESULTS" -> "TA-DATA" [sha="0ea300c6c476e6327e00a15bceffe3ffad022deaf25c46828caa4370ace4a011"]; -"TT-RESULTS" -> "TA-ANALYSIS" [sha="4287aba2b161b9850d8ecbf3bd04406082098de78f57b24200dca9c51d2bee96"]; -"TT-RESULTS" -> "TA-VALIDATION" [sha=e384a259589a5e8745ec6fe6b91e1dff4394bf19460f1795f54f82277f918be2]; +"TT-CHANGES" [sha="4a086732b7536c247f46e786c690bc81c34fe82490fda85f0797983d1fa1ba72"]; +"TA-ANALYSIS" [sha="76cb774300c75d7b6a24c872fe691f2f0cd1d36cc4a2493e835f6b861bc79c34"]; +"TA-BEHAVIOURS" [sha="5d4efe1fa03929829eca8f81199d71ad49d0923f3dada54dbbe2c59b96416246"]; +"TA-CONFIDENCE" [sha="4aa93d2dbf45c714660dabedde37981109b2ad0553959226557b614af22cded2"]; +"TA-CONSTRAINTS" [sha=b6a7e9f9303c4d00b4cc7a9105b9371a0bbdbd6dfbdd19eb6a121d73f4e98d16]; +"TA-DATA" [sha="796e4b5851340906568a47c4436c1fa67ca1c48c98a8b6a16b19283d04cea799"]; +"TA-FIXES" [sha="08f069034d1399c43c4321c5f05de72125f4155a8b68b9bbb2029cb679e6ad48"]; +"TA-INDICATORS" [sha=a5392ef1e3e40d82ef9a304d1304bf007fa85d961ab8ea268231cb212d763477]; +"TA-INPUTS" [sha="88bd55c2ecf7599775a48463d88d6eb20c407e59be02b7fe33c1d79df453af12"]; +"TA-ITERATIONS" [sha="1add795875de0c57b63b29df099bea2177a69a4005280f0fe9ce098015e5e818"]; +"TA-METHODOLOGIES" [sha="24d5a7953ffdf99b22eb1d9a8453158a85c8e96c899972d5bc44e4e70ddabcdc"]; +"TA-MISBEHAVIOURS" [sha="975a70f96b5d537f3deecb58ed526b8434f18491973e0fb3eaf6fdd8cf615cb6"]; +"TA-RELEASES" [sha="0a9784300799d216ec66c67b32e2a896d8ba654a5ba232f527bf1444f2cafec8"]; +"TA-SUPPLY_CHAIN" [sha=eead1eab250c79b8350c5a1732c3ec41f65807f90069c4e19fff750b93eb21bc]; +"TA-TESTS" [sha="71e3bb31b9a937adcc438b3e0581a35bde7157398d11dd75de853ab32e565ce8"]; +"TA-UPDATES" [sha="44c56d2467ce1ce550e15912c7aaa3ecd4a40a79791857dd9332030bce31f5fa"]; +"TA-VALIDATION" [sha="20f6d87b89d9fbffe427a734ab70906c26ad787e53032a8c4f4d599227704be6"]; +"TRUSTABLE-SOFTWARE" [sha="1d95e95557fce96a6afd9a8fa9ae7f712f1d344a618dd1bd4281c0c9bb679019"]; +"TT-CONFIDENCE" [sha=aec1c3530cd85a0cfeb6f5e4b9f5edd38199407cd42d95d151828e1eebc673f1]; +"TT-CONSTRUCTION" [sha=a8dba34d078e9e6346a32719778f8a21c5ab4fef561575ef684d0bb361a39e85]; +"TT-EXPECTATIONS" [sha=eb3b99bc2977f2e8039b7c51cb03d0c22024119fbd59c0a71b79873fae9d1547]; +"TT-PROVENANCE" [sha="464d66ec188f5008a24aa9fca9c82ee4941015c716593b09c31b6f679b7ea0e4"]; +"TT-RESULTS" [sha="5249bb84a61c58ef002ab86edba14ea9bc7f573f2a781626b995d79efc378ff2"]; +"TT-CHANGES" -> "TA-FIXES" [sha="73c42e776c0f2067bec787580363c92032595da75573d5793f13c5e3559d044d"]; +"TT-CHANGES" -> "TA-UPDATES" [sha=e8fdebd723f00411bed32c98a1cd3b047d9f4d45724e29c1a57be00cc1e0c5c3]; +"TRUSTABLE-SOFTWARE" -> "TT-CHANGES" [sha=b1cc67da89ccb17cb6908f7f042a1588e8786d7e3fc5d693d5675ab956861517]; +"TRUSTABLE-SOFTWARE" -> "TT-CONFIDENCE" [sha=ce4be7c13abdb79f64dce7df4dc346b8b105175632efc0b7882084c006a656e6]; +"TRUSTABLE-SOFTWARE" -> "TT-CONSTRUCTION" [sha="437dab78ab6bbd37dac941f6b847703f6c37e739153d4eadf48e592370a47cdb"]; 
+"TRUSTABLE-SOFTWARE" -> "TT-EXPECTATIONS" [sha=deb676d165f5c6351af879773a3171047f321513e2bafb0c4ad9cc78c51d15d4]; +"TRUSTABLE-SOFTWARE" -> "TT-PROVENANCE" [sha=e323a5995c367e970c23796c3cfa2837a780c5ef29e672f268af111d8f6f1300]; +"TRUSTABLE-SOFTWARE" -> "TT-RESULTS" [sha="7e3f6ce2b7371727a08578276b46626410472ee83a53ce234527280d7c99d7b0"]; +"TT-CONFIDENCE" -> "TA-METHODOLOGIES" [sha=fcc0006e13a5d248937c4a76855c954db9c2848218e3196bd377d1679027e762]; +"TT-CONFIDENCE" -> "TA-CONFIDENCE" [sha="8efd66c5255e15d11b4a74fbc79d0c4a7fa9a744fdcc026b9fee2e90ec1148f5"]; +"TT-CONSTRUCTION" -> "TA-RELEASES" [sha="60673fda603b2c0da8ca6828b688e640168a78c53d8ba33d6d92a5943f1ed07a"]; +"TT-CONSTRUCTION" -> "TA-TESTS" [sha=e88dbf0d4b8c1db886b04c73fae9de8f599d5958f0ebe3a07a0d34112deee586]; +"TT-CONSTRUCTION" -> "TA-ITERATIONS" [sha="1365c148e1f8685e0c673a14b5aebb55d3542b6a2aa38614c23051a8cb797625"]; +"TT-EXPECTATIONS" -> "TA-BEHAVIOURS" [sha=c791907335b64629c40fa3904cb1ee2f90bcd108651d768bfdb5745e77f277a9]; +"TT-EXPECTATIONS" -> "TA-MISBEHAVIOURS" [sha="3ed521c03263953246e4c328058c7a089f69b88fa4032fd7817abbd890112d39"]; +"TT-EXPECTATIONS" -> "TA-CONSTRAINTS" [sha="0a27dfbdac555f0bf00df3bf9891ec8e3f0cd21e7b2fc0bef90bf1354bd136bc"]; +"TT-EXPECTATIONS" -> "TA-INDICATORS" [sha="02363119362f62b9c3e6964ce8ce1040cc02addc6744bff4988d7b800a325b27"]; +"TT-PROVENANCE" -> "TA-SUPPLY_CHAIN" [sha="5c6b1a4d73bd824df97dd84054619c839e5f80e0303fe6cae63422e572c2f6b8"]; +"TT-PROVENANCE" -> "TA-INPUTS" [sha="2ad319757fb069c2a9a0fdd2ce00dbda0b5d4064b3a21c5c75fa990f1c74f501"]; +"TT-RESULTS" -> "TA-DATA" [sha=c32ec353c886f8e8453fc81d3c22f79d0c3dd1c49ca61f8e3d667d9cfd2df333]; +"TT-RESULTS" -> "TA-ANALYSIS" [sha=e38dabbcd806b320bc91a14a99c80510402513454d239f2c68970abcc28696b3]; +"TT-RESULTS" -> "TA-VALIDATION" [sha="5d01a5f457e49e0ca1bfbe343063210681e45c7ff3a4b408acd10e9e7c8f7442"]; } From d7cdd5cb13ee04aeb1dbdbde7956cc09e318d100 Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Fri, 4 Jul 2025 04:58:09 +0000 Subject: [PATCH 08/10] update: Adapt explaination to TAs and TT -Change name from XYZ to JSON-Library --- .dotstop.dot | 80 +++++++++---------- docs/trustable/TRUSTABLE-SOFTWARE.md | 2 +- docs/trustable/assertions/TA-BEHAVIOURS.md | 2 +- docs/trustable/assertions/TA-CONFIDENCE.md | 2 +- docs/trustable/assertions/TA-CONSTRAINTS.md | 2 +- docs/trustable/assertions/TA-INPUTS.md | 2 +- docs/trustable/assertions/TA-ITERATIONS.md | 2 +- docs/trustable/assertions/TA-METHODOLOGIES.md | 2 +- docs/trustable/assertions/TA-MISBEHAVIOURS.md | 2 +- docs/trustable/assertions/TA-RELEASES.md | 2 +- docs/trustable/assertions/TA-SUPPLY_CHAIN.md | 2 +- docs/trustable/assertions/TA-TESTS.md | 2 +- docs/trustable/assertions/TA-UPDATES.md | 2 +- docs/trustable/tenets/TT-CHANGES.md | 2 +- docs/trustable/tenets/TT-CONFIDENCE.md | 2 +- docs/trustable/tenets/TT-CONSTRUCTION.md | 2 +- docs/trustable/tenets/TT-EXPECTATIONS.md | 2 +- docs/trustable/tenets/TT-PROVENANCE.md | 2 +- docs/trustable/tenets/TT-RESULTS.md | 2 +- 19 files changed, 58 insertions(+), 58 deletions(-) diff --git a/.dotstop.dot b/.dotstop.dot index 637e0da..ec25ec5 100644 --- a/.dotstop.dot +++ b/.dotstop.dot @@ -2,49 +2,49 @@ # Generated using trustable 2025.6.25. 
digraph G { -"TT-CHANGES" [sha="4a086732b7536c247f46e786c690bc81c34fe82490fda85f0797983d1fa1ba72"]; +"TT-CHANGES" [sha=e276949659e77f8f453a9b32798f607abdfda44450bb10bfbca7a20d68835f7a]; "TA-ANALYSIS" [sha="76cb774300c75d7b6a24c872fe691f2f0cd1d36cc4a2493e835f6b861bc79c34"]; -"TA-BEHAVIOURS" [sha="5d4efe1fa03929829eca8f81199d71ad49d0923f3dada54dbbe2c59b96416246"]; -"TA-CONFIDENCE" [sha="4aa93d2dbf45c714660dabedde37981109b2ad0553959226557b614af22cded2"]; -"TA-CONSTRAINTS" [sha=b6a7e9f9303c4d00b4cc7a9105b9371a0bbdbd6dfbdd19eb6a121d73f4e98d16]; +"TA-BEHAVIOURS" [sha="3ec27e29aa991978efe6a56267b98c2a08b27a4aff693d5cf2b01dfe72276570"]; +"TA-CONFIDENCE" [sha=afda09331b2fc3b8d9b1cd921bee66251a65e5543a473c61eb03f9ea11d57eb5]; +"TA-CONSTRAINTS" [sha=cdee0ae34c33110044975efc981e4ac4d63d824aaaac78233b1f3828ef070da3]; "TA-DATA" [sha="796e4b5851340906568a47c4436c1fa67ca1c48c98a8b6a16b19283d04cea799"]; "TA-FIXES" [sha="08f069034d1399c43c4321c5f05de72125f4155a8b68b9bbb2029cb679e6ad48"]; "TA-INDICATORS" [sha=a5392ef1e3e40d82ef9a304d1304bf007fa85d961ab8ea268231cb212d763477]; -"TA-INPUTS" [sha="88bd55c2ecf7599775a48463d88d6eb20c407e59be02b7fe33c1d79df453af12"]; -"TA-ITERATIONS" [sha="1add795875de0c57b63b29df099bea2177a69a4005280f0fe9ce098015e5e818"]; -"TA-METHODOLOGIES" [sha="24d5a7953ffdf99b22eb1d9a8453158a85c8e96c899972d5bc44e4e70ddabcdc"]; -"TA-MISBEHAVIOURS" [sha="975a70f96b5d537f3deecb58ed526b8434f18491973e0fb3eaf6fdd8cf615cb6"]; -"TA-RELEASES" [sha="0a9784300799d216ec66c67b32e2a896d8ba654a5ba232f527bf1444f2cafec8"]; -"TA-SUPPLY_CHAIN" [sha=eead1eab250c79b8350c5a1732c3ec41f65807f90069c4e19fff750b93eb21bc]; -"TA-TESTS" [sha="71e3bb31b9a937adcc438b3e0581a35bde7157398d11dd75de853ab32e565ce8"]; -"TA-UPDATES" [sha="44c56d2467ce1ce550e15912c7aaa3ecd4a40a79791857dd9332030bce31f5fa"]; +"TA-INPUTS" [sha="6edcb6e0ea0a918f611d6644da7a28dd5c924a210984cd913e7ff558677a6ea6"]; +"TA-ITERATIONS" [sha=c445bfe866db71df67d4e87353d674b62abce19b52048fac37284d8065d67678]; +"TA-METHODOLOGIES" [sha=d24f6055c79268b1f6e4bdf73951719f192d6f492a7376f85b349a95ccb2a319]; +"TA-MISBEHAVIOURS" [sha=d24fcdeee0ae0fa696f272dc39c8e9e37cce7fb9b2cfd07bcd1451b765be5c6e]; +"TA-RELEASES" [sha="3c924109e9916fb154eadbc2d733a8413ae551a1282b73de389b9ad7540a4e75"]; +"TA-SUPPLY_CHAIN" [sha="0629a5a339322874ad3d51c0c14219ede72195bf514abac82c95ebc3a685ae2c"]; +"TA-TESTS" [sha=afa5e61fc86f70f99f6c60b4f2b51ba7d486705f197048a7dc3fc8fea225385c]; +"TA-UPDATES" [sha="9f0554c79d125a37c7e68b9efbb022dc4853a3e2f87c7d224d30c51f5b9b8435"]; "TA-VALIDATION" [sha="20f6d87b89d9fbffe427a734ab70906c26ad787e53032a8c4f4d599227704be6"]; -"TRUSTABLE-SOFTWARE" [sha="1d95e95557fce96a6afd9a8fa9ae7f712f1d344a618dd1bd4281c0c9bb679019"]; -"TT-CONFIDENCE" [sha=aec1c3530cd85a0cfeb6f5e4b9f5edd38199407cd42d95d151828e1eebc673f1]; -"TT-CONSTRUCTION" [sha=a8dba34d078e9e6346a32719778f8a21c5ab4fef561575ef684d0bb361a39e85]; -"TT-EXPECTATIONS" [sha=eb3b99bc2977f2e8039b7c51cb03d0c22024119fbd59c0a71b79873fae9d1547]; -"TT-PROVENANCE" [sha="464d66ec188f5008a24aa9fca9c82ee4941015c716593b09c31b6f679b7ea0e4"]; -"TT-RESULTS" [sha="5249bb84a61c58ef002ab86edba14ea9bc7f573f2a781626b995d79efc378ff2"]; -"TT-CHANGES" -> "TA-FIXES" [sha="73c42e776c0f2067bec787580363c92032595da75573d5793f13c5e3559d044d"]; -"TT-CHANGES" -> "TA-UPDATES" [sha=e8fdebd723f00411bed32c98a1cd3b047d9f4d45724e29c1a57be00cc1e0c5c3]; -"TRUSTABLE-SOFTWARE" -> "TT-CHANGES" [sha=b1cc67da89ccb17cb6908f7f042a1588e8786d7e3fc5d693d5675ab956861517]; -"TRUSTABLE-SOFTWARE" -> "TT-CONFIDENCE" 
[sha=ce4be7c13abdb79f64dce7df4dc346b8b105175632efc0b7882084c006a656e6]; -"TRUSTABLE-SOFTWARE" -> "TT-CONSTRUCTION" [sha="437dab78ab6bbd37dac941f6b847703f6c37e739153d4eadf48e592370a47cdb"]; -"TRUSTABLE-SOFTWARE" -> "TT-EXPECTATIONS" [sha=deb676d165f5c6351af879773a3171047f321513e2bafb0c4ad9cc78c51d15d4]; -"TRUSTABLE-SOFTWARE" -> "TT-PROVENANCE" [sha=e323a5995c367e970c23796c3cfa2837a780c5ef29e672f268af111d8f6f1300]; -"TRUSTABLE-SOFTWARE" -> "TT-RESULTS" [sha="7e3f6ce2b7371727a08578276b46626410472ee83a53ce234527280d7c99d7b0"]; -"TT-CONFIDENCE" -> "TA-METHODOLOGIES" [sha=fcc0006e13a5d248937c4a76855c954db9c2848218e3196bd377d1679027e762]; -"TT-CONFIDENCE" -> "TA-CONFIDENCE" [sha="8efd66c5255e15d11b4a74fbc79d0c4a7fa9a744fdcc026b9fee2e90ec1148f5"]; -"TT-CONSTRUCTION" -> "TA-RELEASES" [sha="60673fda603b2c0da8ca6828b688e640168a78c53d8ba33d6d92a5943f1ed07a"]; -"TT-CONSTRUCTION" -> "TA-TESTS" [sha=e88dbf0d4b8c1db886b04c73fae9de8f599d5958f0ebe3a07a0d34112deee586]; -"TT-CONSTRUCTION" -> "TA-ITERATIONS" [sha="1365c148e1f8685e0c673a14b5aebb55d3542b6a2aa38614c23051a8cb797625"]; -"TT-EXPECTATIONS" -> "TA-BEHAVIOURS" [sha=c791907335b64629c40fa3904cb1ee2f90bcd108651d768bfdb5745e77f277a9]; -"TT-EXPECTATIONS" -> "TA-MISBEHAVIOURS" [sha="3ed521c03263953246e4c328058c7a089f69b88fa4032fd7817abbd890112d39"]; -"TT-EXPECTATIONS" -> "TA-CONSTRAINTS" [sha="0a27dfbdac555f0bf00df3bf9891ec8e3f0cd21e7b2fc0bef90bf1354bd136bc"]; -"TT-EXPECTATIONS" -> "TA-INDICATORS" [sha="02363119362f62b9c3e6964ce8ce1040cc02addc6744bff4988d7b800a325b27"]; -"TT-PROVENANCE" -> "TA-SUPPLY_CHAIN" [sha="5c6b1a4d73bd824df97dd84054619c839e5f80e0303fe6cae63422e572c2f6b8"]; -"TT-PROVENANCE" -> "TA-INPUTS" [sha="2ad319757fb069c2a9a0fdd2ce00dbda0b5d4064b3a21c5c75fa990f1c74f501"]; -"TT-RESULTS" -> "TA-DATA" [sha=c32ec353c886f8e8453fc81d3c22f79d0c3dd1c49ca61f8e3d667d9cfd2df333]; -"TT-RESULTS" -> "TA-ANALYSIS" [sha=e38dabbcd806b320bc91a14a99c80510402513454d239f2c68970abcc28696b3]; -"TT-RESULTS" -> "TA-VALIDATION" [sha="5d01a5f457e49e0ca1bfbe343063210681e45c7ff3a4b408acd10e9e7c8f7442"]; +"TRUSTABLE-SOFTWARE" [sha="6513a163e6eac72c930253e4bc0404be5ea625d1dec436104991a565df4baaa3"]; +"TT-CONFIDENCE" [sha="507f0a905667e1d91265a4e577534eb51ebc0b3e84ffc1956cd0d80dc61b6d3a"]; +"TT-CONSTRUCTION" [sha="3752c6a34c0cc3ef905e000e635a7373748976744f3d1f75f1c2ed172366e350"]; +"TT-EXPECTATIONS" [sha="362eb86c872fb76b2a1075ff978252112bbad0a5fb3041895381f8c76b64c5e6"]; +"TT-PROVENANCE" [sha="4607bf9c0527508673fa37c110b9cf5f2ff0567c324cf728623f1e8ff094be32"]; +"TT-RESULTS" [sha="382987171ac6dc0d1114f5c0dbb77452300c14596514bbab126ae1f7d1bbb842"]; +"TT-CHANGES" -> "TA-FIXES" [sha=d9dc2ad1dcbfde839249e8df9eb89ef978bdfee7b7b4933fae12e10cbc91762b]; +"TT-CHANGES" -> "TA-UPDATES" [sha=f8948db2f344f4cdd5bdc71dc54e63b446f40af09235c37f5d5cf59dcfdfbfa0]; +"TRUSTABLE-SOFTWARE" -> "TT-CHANGES" [sha=a526e6de925b57edddfbc350de334735ee7ef23828b9e66ba781e8633c9f72df]; +"TRUSTABLE-SOFTWARE" -> "TT-CONFIDENCE" [sha="07cdcfab2c8c5121dd0acecf3771ee674dde8663e4cb335cfb74aa774f10cc5b"]; +"TRUSTABLE-SOFTWARE" -> "TT-CONSTRUCTION" [sha="8598c4138e9dda4691a3cbc1613530bb1a3f1c163edf523e41a9ba532b98fe83"]; +"TRUSTABLE-SOFTWARE" -> "TT-EXPECTATIONS" [sha=f6dba0c755d9ac4c9ed0ed2e08d5d51e6f7f1572e6de5581c90fbdaf3cafa4d4]; +"TRUSTABLE-SOFTWARE" -> "TT-PROVENANCE" [sha=c97824acbd35cf2b4a9e4ee2f66c46333b483eac99ef690e2bb105ef4756e527]; +"TRUSTABLE-SOFTWARE" -> "TT-RESULTS" [sha=b9e5b5fdf1cda120574cd2f351e9876a0a0c683152267d3898e6c161e7bda988]; +"TT-CONFIDENCE" -> "TA-METHODOLOGIES" 
[sha="5752e4930e6b0dbc6829b053f4bc7e7e054d416a8c9b2e19a1c3dd83d51fba9b"]; +"TT-CONFIDENCE" -> "TA-CONFIDENCE" [sha="2eaf5b9e879128e866585d5016bfde73f1ef1b192915fdb988cba7b6a0e679f2"]; +"TT-CONSTRUCTION" -> "TA-RELEASES" [sha="290d67048ce0b7e9d40d236b01fc79305d3d49d2c4a541ab3fe48d38347d45d5"]; +"TT-CONSTRUCTION" -> "TA-TESTS" [sha=dddbe1b9b7a7fdaf4003a939660dcb547eacfd78b6f446cb4e065047d95efd9a]; +"TT-CONSTRUCTION" -> "TA-ITERATIONS" [sha="671795bbd8a789803e29f531e12074129e99f1329d27bc97ad0bbee01d8432db"]; +"TT-EXPECTATIONS" -> "TA-BEHAVIOURS" [sha=bab309ba80ce2c2b1d7146220da91f1f456c03d4aad8a724db777933e8924ebb]; +"TT-EXPECTATIONS" -> "TA-MISBEHAVIOURS" [sha=b9c4c4ce6e39a7171aa8b02c3267172229ff3de17ff5cd2da9839e67334e5453]; +"TT-EXPECTATIONS" -> "TA-CONSTRAINTS" [sha=af6a8726cb3a4274ce8ef7e000f0ea9d8f301e8d543254e146045c263873260d]; +"TT-EXPECTATIONS" -> "TA-INDICATORS" [sha=c6b66b2315b853fbe7f4844631f8a522cf0cff8f2984dea65c8b627512efdede]; +"TT-PROVENANCE" -> "TA-SUPPLY_CHAIN" [sha=a9efbde8812834ed5ea620c826a6b41f28219b61a06b00dcd74632685124a8b9]; +"TT-PROVENANCE" -> "TA-INPUTS" [sha=b72b13298357c1738735fc9cc56b0e64cc9fec0124f1721315f64f24faa17f71]; +"TT-RESULTS" -> "TA-DATA" [sha=bdbef171f4a2b69b6f8b47d3b2c9f0642ffb3120ba471c7be0da274a54c4d549]; +"TT-RESULTS" -> "TA-ANALYSIS" [sha="53f912e517e9b33ca019d4a4aac432fee37c3315ea9a155e145b90122f9c8fb7"]; +"TT-RESULTS" -> "TA-VALIDATION" [sha=bc8f3c8b5afd04ec4f77e750b8c82e5bb1c729811895ff49663b904d42d49fdc]; } diff --git a/docs/trustable/TRUSTABLE-SOFTWARE.md b/docs/trustable/TRUSTABLE-SOFTWARE.md index 140c030..78ec02d 100644 --- a/docs/trustable/TRUSTABLE-SOFTWARE.md +++ b/docs/trustable/TRUSTABLE-SOFTWARE.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -This release of XYZ is Trustable. +This release of JSON-Library is Trustable. diff --git a/docs/trustable/assertions/TA-BEHAVIOURS.md b/docs/trustable/assertions/TA-BEHAVIOURS.md index 43881e2..c25ba66 100644 --- a/docs/trustable/assertions/TA-BEHAVIOURS.md +++ b/docs/trustable/assertions/TA-BEHAVIOURS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Expected or required behaviours for XYZ are identified, specified, verified and validated based on analysis. +Expected or required behaviours for JSON-Library are identified, specified, verified and validated based on analysis. diff --git a/docs/trustable/assertions/TA-CONFIDENCE.md b/docs/trustable/assertions/TA-CONFIDENCE.md index 93a0d02..599d0e7 100644 --- a/docs/trustable/assertions/TA-CONFIDENCE.md +++ b/docs/trustable/assertions/TA-CONFIDENCE.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Confidence in XYZ is measured based on results of analysis. +Confidence in JSON-Library is measured based on results of analysis. diff --git a/docs/trustable/assertions/TA-CONSTRAINTS.md b/docs/trustable/assertions/TA-CONSTRAINTS.md index 72ba26d..62075eb 100644 --- a/docs/trustable/assertions/TA-CONSTRAINTS.md +++ b/docs/trustable/assertions/TA-CONSTRAINTS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Constraints on adaptation and deployment of XYZ are specified. +Constraints on adaptation and deployment of JSON-Library are specified. diff --git a/docs/trustable/assertions/TA-INPUTS.md b/docs/trustable/assertions/TA-INPUTS.md index f9692f6..6bcf95f 100644 --- a/docs/trustable/assertions/TA-INPUTS.md +++ b/docs/trustable/assertions/TA-INPUTS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -All inputs to XYZ are assessed, to identify potential risks and issues. +All inputs to JSON-Library are assessed, to identify potential risks and issues. 
diff --git a/docs/trustable/assertions/TA-ITERATIONS.md b/docs/trustable/assertions/TA-ITERATIONS.md index 1a9847c..fc4ffd6 100644 --- a/docs/trustable/assertions/TA-ITERATIONS.md +++ b/docs/trustable/assertions/TA-ITERATIONS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -All constructed iterations of XYZ include source code, build instructions, tests, results and attestations. +All constructed iterations of JSON-Library include source code, build instructions, tests, results and attestations. diff --git a/docs/trustable/assertions/TA-METHODOLOGIES.md b/docs/trustable/assertions/TA-METHODOLOGIES.md index 8523d30..9b5f997 100644 --- a/docs/trustable/assertions/TA-METHODOLOGIES.md +++ b/docs/trustable/assertions/TA-METHODOLOGIES.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Manual methodologies applied for XYZ by contributors, and their results, are managed according to specified objectives. +Manual methodologies applied for JSON-Library by contributors, and their results, are managed according to specified objectives. diff --git a/docs/trustable/assertions/TA-MISBEHAVIOURS.md b/docs/trustable/assertions/TA-MISBEHAVIOURS.md index 99aaebd..ab4e869 100644 --- a/docs/trustable/assertions/TA-MISBEHAVIOURS.md +++ b/docs/trustable/assertions/TA-MISBEHAVIOURS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Prohibited misbehaviours for XYZ are identified, and mitigations are specified, verified and validated based on analysis. +Prohibited misbehaviours for JSON-Library are identified, and mitigations are specified, verified and validated based on analysis. diff --git a/docs/trustable/assertions/TA-RELEASES.md b/docs/trustable/assertions/TA-RELEASES.md index 00d939e..49b1597 100644 --- a/docs/trustable/assertions/TA-RELEASES.md +++ b/docs/trustable/assertions/TA-RELEASES.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Construction of XYZ releases is fully repeatable and the results are fully reproducible, with any exceptions documented and justified. +Construction of JSON-Library releases is fully repeatable and the results are fully reproducible, with any exceptions documented and justified. diff --git a/docs/trustable/assertions/TA-SUPPLY_CHAIN.md b/docs/trustable/assertions/TA-SUPPLY_CHAIN.md index bc3c297..4e6a3fc 100644 --- a/docs/trustable/assertions/TA-SUPPLY_CHAIN.md +++ b/docs/trustable/assertions/TA-SUPPLY_CHAIN.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -All sources for XYZ and tools are mirrored in our controlled environment. +All sources for JSON-Library and tools are mirrored in our controlled environment. diff --git a/docs/trustable/assertions/TA-TESTS.md b/docs/trustable/assertions/TA-TESTS.md index 7cd1e24..89afde5 100644 --- a/docs/trustable/assertions/TA-TESTS.md +++ b/docs/trustable/assertions/TA-TESTS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -All tests for XYZ, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. +All tests for JSON-Library, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. diff --git a/docs/trustable/assertions/TA-UPDATES.md b/docs/trustable/assertions/TA-UPDATES.md index f7d008d..fb0711e 100644 --- a/docs/trustable/assertions/TA-UPDATES.md +++ b/docs/trustable/assertions/TA-UPDATES.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -XYZ components, configurations and tools are updated under specified change and configuration management controls. 
+JSON-Library components, configurations and tools are updated under specified change and configuration management controls. diff --git a/docs/trustable/tenets/TT-CHANGES.md b/docs/trustable/tenets/TT-CHANGES.md index 2234874..17a15c5 100644 --- a/docs/trustable/tenets/TT-CHANGES.md +++ b/docs/trustable/tenets/TT-CHANGES.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -XYZ is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. +JSON-Library is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. diff --git a/docs/trustable/tenets/TT-CONFIDENCE.md b/docs/trustable/tenets/TT-CONFIDENCE.md index 925fdaa..0171966 100644 --- a/docs/trustable/tenets/TT-CONFIDENCE.md +++ b/docs/trustable/tenets/TT-CONFIDENCE.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Confidence in XYZ is achieved by measuring and analysing behaviour and evidence over time. +Confidence in JSON-Library is achieved by measuring and analysing behaviour and evidence over time. diff --git a/docs/trustable/tenets/TT-CONSTRUCTION.md b/docs/trustable/tenets/TT-CONSTRUCTION.md index 1d598c4..9c90952 100644 --- a/docs/trustable/tenets/TT-CONSTRUCTION.md +++ b/docs/trustable/tenets/TT-CONSTRUCTION.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Tools are provided to build XYZ from trusted sources (also provided) with full reproducibility. +Tools are provided to build JSON-Library from trusted sources (also provided) with full reproducibility. diff --git a/docs/trustable/tenets/TT-EXPECTATIONS.md b/docs/trustable/tenets/TT-EXPECTATIONS.md index 16e67bc..8b64124 100644 --- a/docs/trustable/tenets/TT-EXPECTATIONS.md +++ b/docs/trustable/tenets/TT-EXPECTATIONS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Documentation is provided, specifying what XYZ is expected to do, and what it must not do, and how this is verified. +Documentation is provided, specifying what JSON-Library is expected to do, and what it must not do, and how this is verified. diff --git a/docs/trustable/tenets/TT-PROVENANCE.md b/docs/trustable/tenets/TT-PROVENANCE.md index d7bb8c0..4e2b3b2 100644 --- a/docs/trustable/tenets/TT-PROVENANCE.md +++ b/docs/trustable/tenets/TT-PROVENANCE.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -All inputs (and attestations for claims) for XYZ are provided with known provenance. +All inputs (and attestations for claims) for JSON-Library are provided with known provenance. diff --git a/docs/trustable/tenets/TT-RESULTS.md b/docs/trustable/tenets/TT-RESULTS.md index fe67d03..8cac9e3 100644 --- a/docs/trustable/tenets/TT-RESULTS.md +++ b/docs/trustable/tenets/TT-RESULTS.md @@ -3,4 +3,4 @@ level: 1.1 normative: true --- -Evidence is provided to demonstrate that XYZ does what it is supposed to do, and does not do what it must not do. +Evidence is provided to demonstrate that JSON-Library does what it is supposed to do, and does not do what it must not do. 
From c1d6bf3ff00e2906c0542da6ebf9a808c9ac5908 Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Mon, 7 Jul 2025 15:13:02 +0000 Subject: [PATCH 09/10] improve: md to rst file conversion -add conversion from md to rst -add processing for suitable layout in score -add rst files that were adapted -add WFJ with reference as example Evidence --- .dotstop.dot | 2 + docs/index.rst | 1 + .../json-library-assertions/WFJ-01.md | 14 + docs/trustable/trudag/TA.md | 448 +++++ docs/trustable/trudag/TRUSTABLE.md | 32 + docs/trustable/trudag/TT.md | 178 ++ docs/trustable/trudag/WFJ.md | 1733 ++++++++++++++++ docs/trustable/trudag/dashboard.md | 91 + docs/trustable/trudag/figs/Lennart_hist.svg | 708 +++++++ docs/trustable/trudag/figs/all_hist.svg | 727 +++++++ docs/trustable/trudag/figs/evidence_hist.svg | 739 +++++++ .../trudag/figs/expectations_hist.svg | 708 +++++++ docs/trustable/trudag/m2r2_test/TA.rst | 532 +++++ docs/trustable/trudag/m2r2_test/TRUSTABLE.rst | 39 + docs/trustable/trudag/m2r2_test/TT.rst | 217 ++ docs/trustable/trudag/m2r2_test/WFJ.rst | 1747 +++++++++++++++++ docs/trustable/trudag/m2r2_test/dashboard.rst | 137 ++ docs/trustable/trudag/m2r2_test/nav.rst | 11 + docs/trustable/trudag/m2r2_test/processing.py | 279 +++ .../trustable_report_for_json_library.rst | 153 ++ docs/trustable/trudag/nav.md | 6 + .../trustable_report_for_json_library.md | 76 + docs/trustable/trudag_report.rst | 30 + 23 files changed, 8608 insertions(+) create mode 100644 docs/trustable/json-library-assertions/WFJ-01.md create mode 100644 docs/trustable/trudag/TA.md create mode 100644 docs/trustable/trudag/TRUSTABLE.md create mode 100644 docs/trustable/trudag/TT.md create mode 100644 docs/trustable/trudag/WFJ.md create mode 100644 docs/trustable/trudag/dashboard.md create mode 100644 docs/trustable/trudag/figs/Lennart_hist.svg create mode 100644 docs/trustable/trudag/figs/all_hist.svg create mode 100644 docs/trustable/trudag/figs/evidence_hist.svg create mode 100644 docs/trustable/trudag/figs/expectations_hist.svg create mode 100644 docs/trustable/trudag/m2r2_test/TA.rst create mode 100644 docs/trustable/trudag/m2r2_test/TRUSTABLE.rst create mode 100644 docs/trustable/trudag/m2r2_test/TT.rst create mode 100644 docs/trustable/trudag/m2r2_test/WFJ.rst create mode 100644 docs/trustable/trudag/m2r2_test/dashboard.rst create mode 100644 docs/trustable/trudag/m2r2_test/nav.rst create mode 100644 docs/trustable/trudag/m2r2_test/processing.py create mode 100644 docs/trustable/trudag/m2r2_test/trustable_report_for_json_library.rst create mode 100644 docs/trustable/trudag/nav.md create mode 100644 docs/trustable/trudag/trustable_report_for_json_library.md create mode 100644 docs/trustable/trudag_report.rst diff --git a/.dotstop.dot b/.dotstop.dot index ec25ec5..2f7e784 100644 --- a/.dotstop.dot +++ b/.dotstop.dot @@ -25,8 +25,10 @@ digraph G { "TT-EXPECTATIONS" [sha="362eb86c872fb76b2a1075ff978252112bbad0a5fb3041895381f8c76b64c5e6"]; "TT-PROVENANCE" [sha="4607bf9c0527508673fa37c110b9cf5f2ff0567c324cf728623f1e8ff094be32"]; "TT-RESULTS" [sha="382987171ac6dc0d1114f5c0dbb77452300c14596514bbab126ae1f7d1bbb842"]; +"WFJ-01" [sha="82f699582ecb4aea047df8f1b0908f3c0e3acec9896f61e5edd102bf1616ac4b"]; "TT-CHANGES" -> "TA-FIXES" [sha=d9dc2ad1dcbfde839249e8df9eb89ef978bdfee7b7b4933fae12e10cbc91762b]; "TT-CHANGES" -> "TA-UPDATES" [sha=f8948db2f344f4cdd5bdc71dc54e63b446f40af09235c37f5d5cf59dcfdfbfa0]; +"TA-BEHAVIOURS" -> "WFJ-01" [sha="0f6cc204479d56914d30a93913ac013122faea07b31e1174058cc99296e51106"]; "TRUSTABLE-SOFTWARE" -> "TT-CHANGES" 
[sha=a526e6de925b57edddfbc350de334735ee7ef23828b9e66ba781e8633c9f72df]; "TRUSTABLE-SOFTWARE" -> "TT-CONFIDENCE" [sha="07cdcfab2c8c5121dd0acecf3771ee674dde8663e4cb335cfb74aa774f10cc5b"]; "TRUSTABLE-SOFTWARE" -> "TT-CONSTRUCTION" [sha="8598c4138e9dda4691a3cbc1613530bb1a3f1c163edf523e41a9ba532b98fe83"]; diff --git a/docs/index.rst b/docs/index.rst index 937292d..a9ee503 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -36,6 +36,7 @@ This repository provides the aspired setup for projects using **C++** and **Baze trustable/concept.rst trustable/tenets/index.rst trustable/report.rst + trustable/trudag_report.rst Eclipse General Requirements diff --git a/docs/trustable/json-library-assertions/WFJ-01.md b/docs/trustable/json-library-assertions/WFJ-01.md new file mode 100644 index 0000000..8a1b3c4 --- /dev/null +++ b/docs/trustable/json-library-assertions/WFJ-01.md @@ -0,0 +1,14 @@ +--- +level: 1.1 +normative: true +references: + - type: "file" + path: "nlohmann_json/tests/src/unit-class_parser.cpp" + - type: "cpp-test" + path: "nlohmann_json/tests/src/unit-class_parser.cpp" + test_name: "parse" +score: + Lennart: 1.0 +--- + +The service checks for the four primitive types (strings, numbers, booleans, null). diff --git a/docs/trustable/trudag/TA.md b/docs/trustable/trudag/TA.md new file mode 100644 index 0000000..f5a8ace --- /dev/null +++ b/docs/trustable/trudag/TA.md @@ -0,0 +1,448 @@ + + +--- + +### TA-ANALYSIS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Collected data from tests and monitoring of deployed software is analysed according to specified objectives. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-RESULTS](TT.md#tt-results){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-BEHAVIOURS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Expected or required behaviours for JSON-Library are identified, specified, verified and validated based on analysis. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-EXPECTATIONS](TT.md#tt-expectations){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +- [WFJ-01](WFJ.md#wfj-01){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)" .status-unreviewed} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-CONFIDENCE ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Confidence in JSON-Library is measured based on results of analysis. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-CONFIDENCE](TT.md#tt-confidence){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-CONSTRAINTS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Constraints on adaptation and deployment of JSON-Library are specified. 
+{: .expanded-item-element } + +**Supported Requests:** + +- [TT-EXPECTATIONS](TT.md#tt-expectations){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-DATA ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Data is collected from tests, and from monitoring of deployed software, according to specified objectives. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-RESULTS](TT.md#tt-results){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-FIXES ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Known bugs or misbehaviours are analysed and triaged, and critical fixes or mitigations are implemented or applied. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-CHANGES](TT.md#tt-changes){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-INDICATORS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Advance warning indicators for misbehaviours are identified, and monitoring mechanisms are specified, verified and validated based on analysis. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-EXPECTATIONS](TT.md#tt-expectations){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-INPUTS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +All inputs to JSON-Library are assessed, to identify potential risks and issues. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-PROVENANCE](TT.md#tt-provenance){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-ITERATIONS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +All constructed iterations of JSON-Library include source code, build instructions, tests, results and attestations. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-CONSTRUCTION](TT.md#tt-construction){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-METHODOLOGIES ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Manual methodologies applied for JSON-Library by contributors, and their results, are managed according to specified objectives. 
+{: .expanded-item-element } + +**Supported Requests:** + +- [TT-CONFIDENCE](TT.md#tt-confidence){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-MISBEHAVIOURS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Prohibited misbehaviours for JSON-Library are identified, and mitigations are specified, verified and validated based on analysis. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-EXPECTATIONS](TT.md#tt-expectations){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-RELEASES ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Construction of JSON-Library releases is fully repeatable and the results are fully reproducible, with any exceptions documented and justified. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-CONSTRUCTION](TT.md#tt-construction){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-SUPPLY_CHAIN ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +All sources for JSON-Library and tools are mirrored in our controlled environment. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-PROVENANCE](TT.md#tt-provenance){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-TESTS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +All tests for JSON-Library, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-CONSTRUCTION](TT.md#tt-construction){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-UPDATES ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +JSON-Library components, configurations and tools are updated under specified change and configuration management controls. +{: .expanded-item-element } + +**Supported Requests:** + +- [TT-CHANGES](TT.md#tt-changes){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TA-VALIDATION ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +All specified tests are executed repeatedly, under defined conditions in controlled environments, according to specified objectives. 
+{: .expanded-item-element } + +**Supported Requests:** + +- [TT-RESULTS](TT.md#tt-results){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ diff --git a/docs/trustable/trudag/TRUSTABLE.md b/docs/trustable/trudag/TRUSTABLE.md new file mode 100644 index 0000000..01884d7 --- /dev/null +++ b/docs/trustable/trudag/TRUSTABLE.md @@ -0,0 +1,32 @@ + + +--- + +### TRUSTABLE-SOFTWARE ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +This release of JSON-Library is Trustable. +{: .expanded-item-element } + +**Supported Requests:** + + +**Supporting Items:** + +- [TT-CHANGES](TT.md#tt-changes){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TT-CONFIDENCE](TT.md#tt-confidence){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TT-CONSTRUCTION](TT.md#tt-construction){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TT-EXPECTATIONS](TT.md#tt-expectations){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TT-PROVENANCE](TT.md#tt-provenance){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TT-RESULTS](TT.md#tt-results){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ diff --git a/docs/trustable/trudag/TT.md b/docs/trustable/trudag/TT.md new file mode 100644 index 0000000..d117db6 --- /dev/null +++ b/docs/trustable/trudag/TT.md @@ -0,0 +1,178 @@ + + +--- + +### TT-CHANGES ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +JSON-Library is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. +{: .expanded-item-element } + +**Supported Requests:** + +- [TRUSTABLE-SOFTWARE](TRUSTABLE.md#trustable-software){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +- [TA-FIXES](TA.md#ta-fixes){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-UPDATES](TA.md#ta-updates){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TT-CONFIDENCE ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Confidence in JSON-Library is achieved by measuring and analysing behaviour and evidence over time. +{: .expanded-item-element } + +**Supported Requests:** + +- [TRUSTABLE-SOFTWARE](TRUSTABLE.md#trustable-software){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +- [TA-CONFIDENCE](TA.md#ta-confidence){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-METHODOLOGIES](TA.md#ta-methodologies){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TT-CONSTRUCTION ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Tools are provided to build JSON-Library from trusted sources (also provided) with full reproducibility. 
+{: .expanded-item-element } + +**Supported Requests:** + +- [TRUSTABLE-SOFTWARE](TRUSTABLE.md#trustable-software){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +- [TA-ITERATIONS](TA.md#ta-iterations){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-RELEASES](TA.md#ta-releases){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-TESTS](TA.md#ta-tests){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TT-EXPECTATIONS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Documentation is provided, specifying what JSON-Library is expected to do, and what it must not do, and how this is verified. +{: .expanded-item-element } + +**Supported Requests:** + +- [TRUSTABLE-SOFTWARE](TRUSTABLE.md#trustable-software){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +- [TA-BEHAVIOURS](TA.md#ta-behaviours){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-CONSTRAINTS](TA.md#ta-constraints){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-INDICATORS](TA.md#ta-indicators){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-MISBEHAVIOURS](TA.md#ta-misbehaviours){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TT-PROVENANCE ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +All inputs (and attestations for claims) for JSON-Library are provided with known provenance. +{: .expanded-item-element } + +**Supported Requests:** + +- [TRUSTABLE-SOFTWARE](TRUSTABLE.md#trustable-software){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +- [TA-INPUTS](TA.md#ta-inputs){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-SUPPLY_CHAIN](TA.md#ta-supply_chain){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ + + +--- + +### TT-RESULTS ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +Evidence is provided to demonstrate that JSON-Library does what it is supposed to do, and does not do what it must not do. 
+{: .expanded-item-element } + +**Supported Requests:** + +- [TRUSTABLE-SOFTWARE](TRUSTABLE.md#trustable-software){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +- [TA-ANALYSIS](TA.md#ta-analysis){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-DATA](TA.md#ta-data){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} +- [TA-VALIDATION](TA.md#ta-validation){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +{% raw %} + +**References:** + +_None_ + +{% endraw %} + +**Fallacies:** + +_None_ diff --git a/docs/trustable/trudag/WFJ.md b/docs/trustable/trudag/WFJ.md new file mode 100644 index 0000000..d3c7d48 --- /dev/null +++ b/docs/trustable/trudag/WFJ.md @@ -0,0 +1,1733 @@ + + +--- + +### WFJ-01 ### {: .item-element .item-section class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)" .status-unreviewed} + +The service checks for the four primitive types (strings, numbers, booleans, null). +{: .expanded-item-element } + +**Supported Requests:** + +- [TA-BEHAVIOURS](TA.md#ta-behaviours){.item-element class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + +**Supporting Items:** + +_None_ + +{% raw %} + +**References:** + +- `nlohmann_json/tests/src/unit-class_parser.cpp` + + ??? "Click to view reference" + + ````cpp + // __ _____ _____ _____ + // __| | __| | | | JSON for Modern C++ (supporting code) + // | | |__ | | | | | | version 3.12.0 + // |_____|_____|_____|_|___| https://github.com/nlohmann/json + // + // SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann + // SPDX-License-Identifier: MIT + + #include "doctest_compatibility.h" + + #define JSON_TESTS_PRIVATE + #include + using nlohmann::json; + #ifdef JSON_TEST_NO_GLOBAL_UDLS + using namespace nlohmann::literals; // NOLINT(google-build-using-namespace) + #endif + + #include + + namespace + { + class SaxEventLogger + { + public: + bool null() + { + events.emplace_back("null()"); + return true; + } + + bool boolean(bool val) + { + events.emplace_back(val ? 
"boolean(true)" : "boolean(false)"); + return true; + } + + bool number_integer(json::number_integer_t val) + { + events.push_back("number_integer(" + std::to_string(val) + ")"); + return true; + } + + bool number_unsigned(json::number_unsigned_t val) + { + events.push_back("number_unsigned(" + std::to_string(val) + ")"); + return true; + } + + bool number_float(json::number_float_t /*unused*/, const std::string& s) + { + events.push_back("number_float(" + s + ")"); + return true; + } + + bool string(std::string& val) + { + events.push_back("string(" + val + ")"); + return true; + } + + bool binary(json::binary_t& val) + { + std::string binary_contents = "binary("; + std::string comma_space; + for (auto b : val) + { + binary_contents.append(comma_space); + binary_contents.append(std::to_string(static_cast(b))); + comma_space = ", "; + } + binary_contents.append(")"); + events.push_back(binary_contents); + return true; + } + + bool start_object(std::size_t elements) + { + if (elements == (std::numeric_limits::max)()) + { + events.emplace_back("start_object()"); + } + else + { + events.push_back("start_object(" + std::to_string(elements) + ")"); + } + return true; + } + + bool key(std::string& val) + { + events.push_back("key(" + val + ")"); + return true; + } + + bool end_object() + { + events.emplace_back("end_object()"); + return true; + } + + bool start_array(std::size_t elements) + { + if (elements == (std::numeric_limits::max)()) + { + events.emplace_back("start_array()"); + } + else + { + events.push_back("start_array(" + std::to_string(elements) + ")"); + } + return true; + } + + bool end_array() + { + events.emplace_back("end_array()"); + return true; + } + + bool parse_error(std::size_t position, const std::string& /*unused*/, const json::exception& /*unused*/) + { + errored = true; + events.push_back("parse_error(" + std::to_string(position) + ")"); + return false; + } + + std::vector events {}; // NOLINT(readability-redundant-member-init) + bool errored = false; + }; + + class SaxCountdown : public nlohmann::json::json_sax_t + { + public: + explicit SaxCountdown(const int count) : events_left(count) + {} + + bool null() override + { + return events_left-- > 0; + } + + bool boolean(bool /*val*/) override + { + return events_left-- > 0; + } + + bool number_integer(json::number_integer_t /*val*/) override + { + return events_left-- > 0; + } + + bool number_unsigned(json::number_unsigned_t /*val*/) override + { + return events_left-- > 0; + } + + bool number_float(json::number_float_t /*val*/, const std::string& /*s*/) override + { + return events_left-- > 0; + } + + bool string(std::string& /*val*/) override + { + return events_left-- > 0; + } + + bool binary(json::binary_t& /*val*/) override + { + return events_left-- > 0; + } + + bool start_object(std::size_t /*elements*/) override + { + return events_left-- > 0; + } + + bool key(std::string& /*val*/) override + { + return events_left-- > 0; + } + + bool end_object() override + { + return events_left-- > 0; + } + + bool start_array(std::size_t /*elements*/) override + { + return events_left-- > 0; + } + + bool end_array() override + { + return events_left-- > 0; + } + + bool parse_error(std::size_t /*position*/, const std::string& /*last_token*/, const json::exception& /*ex*/) override + { + return false; + } + + private: + int events_left = 0; + }; + + json parser_helper(const std::string& s); + bool accept_helper(const std::string& s); + void comments_helper(const std::string& s); + + json parser_helper(const std::string& s) + { 
+ json j; + json::parser(nlohmann::detail::input_adapter(s)).parse(true, j); + + // if this line was reached, no exception occurred + // -> check if result is the same without exceptions + json j_nothrow; + CHECK_NOTHROW(json::parser(nlohmann::detail::input_adapter(s), nullptr, false).parse(true, j_nothrow)); + CHECK(j_nothrow == j); + + json j_sax; + nlohmann::detail::json_sax_dom_parser sdp(j_sax); + json::sax_parse(s, &sdp); + CHECK(j_sax == j); + + comments_helper(s); + + return j; + } + + bool accept_helper(const std::string& s) + { + CAPTURE(s) + + // 1. parse s without exceptions + json j; + CHECK_NOTHROW(json::parser(nlohmann::detail::input_adapter(s), nullptr, false).parse(true, j)); + const bool ok_noexcept = !j.is_discarded(); + + // 2. accept s + const bool ok_accept = json::parser(nlohmann::detail::input_adapter(s)).accept(true); + + // 3. check if both approaches come to the same result + CHECK(ok_noexcept == ok_accept); + + // 4. parse with SAX (compare with relaxed accept result) + SaxEventLogger el; + CHECK_NOTHROW(json::sax_parse(s, &el, json::input_format_t::json, false)); + CHECK(json::parser(nlohmann::detail::input_adapter(s)).accept(false) == !el.errored); + + // 5. parse with simple callback + json::parser_callback_t const cb = [](int /*unused*/, json::parse_event_t /*unused*/, json& /*unused*/) noexcept + { + return true; + }; + json const j_cb = json::parse(s, cb, false); + const bool ok_noexcept_cb = !j_cb.is_discarded(); + + // 6. check if this approach came to the same result + CHECK(ok_noexcept == ok_noexcept_cb); + + // 7. check if comments are properly ignored + if (ok_accept) + { + comments_helper(s); + } + + // 8. return result + return ok_accept; + } + + void comments_helper(const std::string& s) + { + json _; + + // parse/accept with default parser + CHECK_NOTHROW(_ = json::parse(s)); + CHECK(json::accept(s)); + + // parse/accept while skipping comments + CHECK_NOTHROW(_ = json::parse(s, nullptr, false, true)); + CHECK(json::accept(s, true)); + + std::vector json_with_comments; + + // start with a comment + json_with_comments.push_back(std::string("// this is a comment\n") + s); + json_with_comments.push_back(std::string("/* this is a comment */") + s); + // end with a comment + json_with_comments.push_back(s + "// this is a comment"); + json_with_comments.push_back(s + "/* this is a comment */"); + + // check all strings + for (const auto& json_with_comment : json_with_comments) + { + CAPTURE(json_with_comment) + CHECK_THROWS_AS(_ = json::parse(json_with_comment), json::parse_error); + CHECK(!json::accept(json_with_comment)); + + CHECK_NOTHROW(_ = json::parse(json_with_comment, nullptr, true, true)); + CHECK(json::accept(json_with_comment, true)); + } + } + + } // namespace + + TEST_CASE("parser class") + { + SECTION("parse") + { + SECTION("null") + { + CHECK(parser_helper("null") == json(nullptr)); + } + + SECTION("true") + { + CHECK(parser_helper("true") == json(true)); + } + + SECTION("false") + { + CHECK(parser_helper("false") == json(false)); + } + + SECTION("array") + { + SECTION("empty array") + { + CHECK(parser_helper("[]") == json(json::value_t::array)); + CHECK(parser_helper("[ ]") == json(json::value_t::array)); + } + + SECTION("nonempty array") + { + CHECK(parser_helper("[true, false, null]") == json({true, false, nullptr})); + } + } + + SECTION("object") + { + SECTION("empty object") + { + CHECK(parser_helper("{}") == json(json::value_t::object)); + CHECK(parser_helper("{ }") == json(json::value_t::object)); + } + + SECTION("nonempty 
object") + { + CHECK(parser_helper("{\"\": true, \"one\": 1, \"two\": null}") == json({{"", true}, {"one", 1}, {"two", nullptr}})); + } + } + + SECTION("string") + { + // empty string + CHECK(parser_helper("\"\"") == json(json::value_t::string)); + + SECTION("errors") + { + // error: tab in string + CHECK_THROWS_WITH_AS(parser_helper("\"\t\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0009 (HT) must be escaped to \\u0009 or \\t; last read: '\"'", json::parse_error&); + // error: newline in string + CHECK_THROWS_WITH_AS(parser_helper("\"\n\""), "[json.exception.parse_error.101] parse error at line 2, column 0: syntax error while parsing value - invalid string: control character U+000A (LF) must be escaped to \\u000A or \\n; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\r\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000D (CR) must be escaped to \\u000D or \\r; last read: '\"'", json::parse_error&); + // error: backspace in string + CHECK_THROWS_WITH_AS(parser_helper("\"\b\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0008 (BS) must be escaped to \\u0008 or \\b; last read: '\"'", json::parse_error&); + // improve code coverage + CHECK_THROWS_AS(parser_helper("\uFF01"), json::parse_error&); + CHECK_THROWS_AS(parser_helper("[-4:1,]"), json::parse_error&); + // unescaped control characters + CHECK_THROWS_WITH_AS(parser_helper("\"\x00\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: missing closing quote; last read: '\"'", json::parse_error&); // NOLINT(bugprone-string-literal-with-embedded-nul) + CHECK_THROWS_WITH_AS(parser_helper("\"\x01\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0001 (SOH) must be escaped to \\u0001; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x02\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0002 (STX) must be escaped to \\u0002; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x03\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0003 (ETX) must be escaped to \\u0003; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x04\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0004 (EOT) must be escaped to \\u0004; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x05\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0005 (ENQ) must be escaped to \\u0005; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x06\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0006 (ACK) must be escaped to \\u0006; last read: '\"'", json::parse_error&); + 
CHECK_THROWS_WITH_AS(parser_helper("\"\x07\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0007 (BEL) must be escaped to \\u0007; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x08\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0008 (BS) must be escaped to \\u0008 or \\b; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x09\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0009 (HT) must be escaped to \\u0009 or \\t; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0a\""), "[json.exception.parse_error.101] parse error at line 2, column 0: syntax error while parsing value - invalid string: control character U+000A (LF) must be escaped to \\u000A or \\n; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0b\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000B (VT) must be escaped to \\u000B; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0c\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000C (FF) must be escaped to \\u000C or \\f; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0d\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000D (CR) must be escaped to \\u000D or \\r; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0e\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000E (SO) must be escaped to \\u000E; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0f\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000F (SI) must be escaped to \\u000F; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x10\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0010 (DLE) must be escaped to \\u0010; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x11\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0011 (DC1) must be escaped to \\u0011; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x12\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0012 (DC2) must be escaped to \\u0012; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x13\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0013 (DC3) must be escaped to \\u0013; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x14\""), 
"[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0014 (DC4) must be escaped to \\u0014; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x15\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0015 (NAK) must be escaped to \\u0015; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x16\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0016 (SYN) must be escaped to \\u0016; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x17\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0017 (ETB) must be escaped to \\u0017; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x18\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0018 (CAN) must be escaped to \\u0018; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x19\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0019 (EM) must be escaped to \\u0019; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1a\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001A (SUB) must be escaped to \\u001A; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1b\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001B (ESC) must be escaped to \\u001B; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1c\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001C (FS) must be escaped to \\u001C; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1d\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001D (GS) must be escaped to \\u001D; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1e\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001E (RS) must be escaped to \\u001E; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1f\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001F (US) must be escaped to \\u001F; last read: '\"'", json::parse_error&); + + SECTION("additional test for null byte") + { + // The test above for the null byte is wrong, because passing + // a string to the parser only reads int until it encounters + // a null byte. This test inserts the null byte later on and + // uses an iterator range. 
+ std::string s = "\"1\""; + s[1] = '\0'; + json _; + CHECK_THROWS_WITH_AS(_ = json::parse(s.begin(), s.end()), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0000 (NUL) must be escaped to \\u0000; last read: '\"'", json::parse_error&); + } + } + + SECTION("escaped") + { + // quotation mark "\"" + auto r1 = R"("\"")"_json; + CHECK(parser_helper("\"\\\"\"") == r1); + // reverse solidus "\\" + auto r2 = R"("\\")"_json; + CHECK(parser_helper("\"\\\\\"") == r2); + // solidus + CHECK(parser_helper("\"\\/\"") == R"("/")"_json); + // backspace + CHECK(parser_helper("\"\\b\"") == json("\b")); + // formfeed + CHECK(parser_helper("\"\\f\"") == json("\f")); + // newline + CHECK(parser_helper("\"\\n\"") == json("\n")); + // carriage return + CHECK(parser_helper("\"\\r\"") == json("\r")); + // horizontal tab + CHECK(parser_helper("\"\\t\"") == json("\t")); + + CHECK(parser_helper("\"\\u0001\"").get() == "\x01"); + CHECK(parser_helper("\"\\u000a\"").get() == "\n"); + CHECK(parser_helper("\"\\u00b0\"").get() == "°"); + CHECK(parser_helper("\"\\u0c00\"").get() == "ఀ"); + CHECK(parser_helper("\"\\ud000\"").get() == "퀀"); + CHECK(parser_helper("\"\\u000E\"").get() == "\x0E"); + CHECK(parser_helper("\"\\u00F0\"").get() == "ð"); + CHECK(parser_helper("\"\\u0100\"").get() == "Ā"); + CHECK(parser_helper("\"\\u2000\"").get() == " "); + CHECK(parser_helper("\"\\uFFFF\"").get() == "￿"); + CHECK(parser_helper("\"\\u20AC\"").get() == "€"); + CHECK(parser_helper("\"€\"").get() == "€"); + CHECK(parser_helper("\"🎈\"").get() == "🎈"); + + CHECK(parser_helper("\"\\ud80c\\udc60\"").get() == "\xf0\x93\x81\xa0"); + CHECK(parser_helper("\"\\ud83c\\udf1e\"").get() == "🌞"); + } + } + + SECTION("number") + { + SECTION("integers") + { + SECTION("without exponent") + { + CHECK(parser_helper("-128") == json(-128)); + CHECK(parser_helper("-0") == json(-0)); + CHECK(parser_helper("0") == json(0)); + CHECK(parser_helper("128") == json(128)); + } + + SECTION("with exponent") + { + CHECK(parser_helper("0e1") == json(0e1)); + CHECK(parser_helper("0E1") == json(0e1)); + + CHECK(parser_helper("10000E-4") == json(10000e-4)); + CHECK(parser_helper("10000E-3") == json(10000e-3)); + CHECK(parser_helper("10000E-2") == json(10000e-2)); + CHECK(parser_helper("10000E-1") == json(10000e-1)); + CHECK(parser_helper("10000E0") == json(10000e0)); + CHECK(parser_helper("10000E1") == json(10000e1)); + CHECK(parser_helper("10000E2") == json(10000e2)); + CHECK(parser_helper("10000E3") == json(10000e3)); + CHECK(parser_helper("10000E4") == json(10000e4)); + + CHECK(parser_helper("10000e-4") == json(10000e-4)); + CHECK(parser_helper("10000e-3") == json(10000e-3)); + CHECK(parser_helper("10000e-2") == json(10000e-2)); + CHECK(parser_helper("10000e-1") == json(10000e-1)); + CHECK(parser_helper("10000e0") == json(10000e0)); + CHECK(parser_helper("10000e1") == json(10000e1)); + CHECK(parser_helper("10000e2") == json(10000e2)); + CHECK(parser_helper("10000e3") == json(10000e3)); + CHECK(parser_helper("10000e4") == json(10000e4)); + + CHECK(parser_helper("-0e1") == json(-0e1)); + CHECK(parser_helper("-0E1") == json(-0e1)); + CHECK(parser_helper("-0E123") == json(-0e123)); + + // numbers after exponent + CHECK(parser_helper("10E0") == json(10e0)); + CHECK(parser_helper("10E1") == json(10e1)); + CHECK(parser_helper("10E2") == json(10e2)); + CHECK(parser_helper("10E3") == json(10e3)); + CHECK(parser_helper("10E4") == json(10e4)); + CHECK(parser_helper("10E5") == json(10e5)); + 
CHECK(parser_helper("10E6") == json(10e6)); + CHECK(parser_helper("10E7") == json(10e7)); + CHECK(parser_helper("10E8") == json(10e8)); + CHECK(parser_helper("10E9") == json(10e9)); + CHECK(parser_helper("10E+0") == json(10e0)); + CHECK(parser_helper("10E+1") == json(10e1)); + CHECK(parser_helper("10E+2") == json(10e2)); + CHECK(parser_helper("10E+3") == json(10e3)); + CHECK(parser_helper("10E+4") == json(10e4)); + CHECK(parser_helper("10E+5") == json(10e5)); + CHECK(parser_helper("10E+6") == json(10e6)); + CHECK(parser_helper("10E+7") == json(10e7)); + CHECK(parser_helper("10E+8") == json(10e8)); + CHECK(parser_helper("10E+9") == json(10e9)); + CHECK(parser_helper("10E-1") == json(10e-1)); + CHECK(parser_helper("10E-2") == json(10e-2)); + CHECK(parser_helper("10E-3") == json(10e-3)); + CHECK(parser_helper("10E-4") == json(10e-4)); + CHECK(parser_helper("10E-5") == json(10e-5)); + CHECK(parser_helper("10E-6") == json(10e-6)); + CHECK(parser_helper("10E-7") == json(10e-7)); + CHECK(parser_helper("10E-8") == json(10e-8)); + CHECK(parser_helper("10E-9") == json(10e-9)); + } + + SECTION("edge cases") + { + // From RFC8259, Section 6: + // Note that when such software is used, numbers that are + // integers and are in the range [-(2**53)+1, (2**53)-1] + // are interoperable in the sense that implementations will + // agree exactly on their numeric values. + + // -(2**53)+1 + CHECK(parser_helper("-9007199254740991").get() == -9007199254740991); + // (2**53)-1 + CHECK(parser_helper("9007199254740991").get() == 9007199254740991); + } + + SECTION("over the edge cases") // issue #178 - Integer conversion to unsigned (incorrect handling of 64-bit integers) + { + // While RFC8259, Section 6 specifies a preference for support + // for ranges in range of IEEE 754-2008 binary64 (double precision) + // this does not accommodate 64-bit integers without loss of accuracy. + // As 64-bit integers are now widely used in software, it is desirable + // to expand support to the full 64 bit (signed and unsigned) range + // i.e. -(2**63) -> (2**64)-1. 
+ + // -(2**63) ** Note: compilers see negative literals as negated positive numbers (hence the -1)) + CHECK(parser_helper("-9223372036854775808").get() == -9223372036854775807 - 1); + // (2**63)-1 + CHECK(parser_helper("9223372036854775807").get() == 9223372036854775807); + // (2**64)-1 + CHECK(parser_helper("18446744073709551615").get() == 18446744073709551615u); + } + } + + SECTION("floating-point") + { + SECTION("without exponent") + { + CHECK(parser_helper("-128.5") == json(-128.5)); + CHECK(parser_helper("0.999") == json(0.999)); + CHECK(parser_helper("128.5") == json(128.5)); + CHECK(parser_helper("-0.0") == json(-0.0)); + } + + SECTION("with exponent") + { + CHECK(parser_helper("-128.5E3") == json(-128.5E3)); + CHECK(parser_helper("-128.5E-3") == json(-128.5E-3)); + CHECK(parser_helper("-0.0e1") == json(-0.0e1)); + CHECK(parser_helper("-0.0E1") == json(-0.0e1)); + } + } + + SECTION("overflow") + { + // overflows during parsing yield an exception + CHECK_THROWS_WITH_AS(parser_helper("1.18973e+4932").empty(), "[json.exception.out_of_range.406] number overflow parsing '1.18973e+4932'", json::out_of_range&); + } + + SECTION("invalid numbers") + { + // numbers must not begin with "+" + CHECK_THROWS_AS(parser_helper("+1"), json::parse_error&); + CHECK_THROWS_AS(parser_helper("+0"), json::parse_error&); + + CHECK_THROWS_WITH_AS(parser_helper("01"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - unexpected number literal; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-01"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - unexpected number literal; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("--1"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '--'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '1.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E-"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected digit after exponent sign; last read: '1E-'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1.E1"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '1.E'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-1E"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '-1E'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0E#"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '-0E#'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0E-#"), + "[json.exception.parse_error.101] parse error at line 1, 
column 5: syntax error while parsing value - invalid number; expected digit after exponent sign; last read: '-0E-#'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0#"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: '-0#'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0.0:"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - unexpected ':'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0.0Z"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: '-0.0Z'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0E123:"), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - unexpected ':'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0e0-:"), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid number; expected digit after '-'; last read: '-:'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0e-:"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid number; expected digit after exponent sign; last read: '-0e-:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0f"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: '-0f'; expected end of input", json::parse_error&); + } + } + } + + SECTION("accept") + { + SECTION("null") + { + CHECK(accept_helper("null")); + } + + SECTION("true") + { + CHECK(accept_helper("true")); + } + + SECTION("false") + { + CHECK(accept_helper("false")); + } + + SECTION("array") + { + SECTION("empty array") + { + CHECK(accept_helper("[]")); + CHECK(accept_helper("[ ]")); + } + + SECTION("nonempty array") + { + CHECK(accept_helper("[true, false, null]")); + } + } + + SECTION("object") + { + SECTION("empty object") + { + CHECK(accept_helper("{}")); + CHECK(accept_helper("{ }")); + } + + SECTION("nonempty object") + { + CHECK(accept_helper("{\"\": true, \"one\": 1, \"two\": null}")); + } + } + + SECTION("string") + { + // empty string + CHECK(accept_helper("\"\"")); + + SECTION("errors") + { + // error: tab in string + CHECK(accept_helper("\"\t\"") == false); + // error: newline in string + CHECK(accept_helper("\"\n\"") == false); + CHECK(accept_helper("\"\r\"") == false); + // error: backspace in string + CHECK(accept_helper("\"\b\"") == false); + // improve code coverage + CHECK(accept_helper("\uFF01") == false); + CHECK(accept_helper("[-4:1,]") == false); + // unescaped control characters + CHECK(accept_helper("\"\x00\"") == false); // NOLINT(bugprone-string-literal-with-embedded-nul) + CHECK(accept_helper("\"\x01\"") == false); + CHECK(accept_helper("\"\x02\"") == false); + CHECK(accept_helper("\"\x03\"") == false); + CHECK(accept_helper("\"\x04\"") == false); + CHECK(accept_helper("\"\x05\"") == false); + CHECK(accept_helper("\"\x06\"") == false); + CHECK(accept_helper("\"\x07\"") == false); + CHECK(accept_helper("\"\x08\"") == false); + CHECK(accept_helper("\"\x09\"") == false); + CHECK(accept_helper("\"\x0a\"") == false); + CHECK(accept_helper("\"\x0b\"") == false); + CHECK(accept_helper("\"\x0c\"") == 
false); + CHECK(accept_helper("\"\x0d\"") == false); + CHECK(accept_helper("\"\x0e\"") == false); + CHECK(accept_helper("\"\x0f\"") == false); + CHECK(accept_helper("\"\x10\"") == false); + CHECK(accept_helper("\"\x11\"") == false); + CHECK(accept_helper("\"\x12\"") == false); + CHECK(accept_helper("\"\x13\"") == false); + CHECK(accept_helper("\"\x14\"") == false); + CHECK(accept_helper("\"\x15\"") == false); + CHECK(accept_helper("\"\x16\"") == false); + CHECK(accept_helper("\"\x17\"") == false); + CHECK(accept_helper("\"\x18\"") == false); + CHECK(accept_helper("\"\x19\"") == false); + CHECK(accept_helper("\"\x1a\"") == false); + CHECK(accept_helper("\"\x1b\"") == false); + CHECK(accept_helper("\"\x1c\"") == false); + CHECK(accept_helper("\"\x1d\"") == false); + CHECK(accept_helper("\"\x1e\"") == false); + CHECK(accept_helper("\"\x1f\"") == false); + } + + SECTION("escaped") + { + // quotation mark "\"" + auto r1 = R"("\"")"_json; + CHECK(accept_helper("\"\\\"\"")); + // reverse solidus "\\" + auto r2 = R"("\\")"_json; + CHECK(accept_helper("\"\\\\\"")); + // solidus + CHECK(accept_helper("\"\\/\"")); + // backspace + CHECK(accept_helper("\"\\b\"")); + // formfeed + CHECK(accept_helper("\"\\f\"")); + // newline + CHECK(accept_helper("\"\\n\"")); + // carriage return + CHECK(accept_helper("\"\\r\"")); + // horizontal tab + CHECK(accept_helper("\"\\t\"")); + + CHECK(accept_helper("\"\\u0001\"")); + CHECK(accept_helper("\"\\u000a\"")); + CHECK(accept_helper("\"\\u00b0\"")); + CHECK(accept_helper("\"\\u0c00\"")); + CHECK(accept_helper("\"\\ud000\"")); + CHECK(accept_helper("\"\\u000E\"")); + CHECK(accept_helper("\"\\u00F0\"")); + CHECK(accept_helper("\"\\u0100\"")); + CHECK(accept_helper("\"\\u2000\"")); + CHECK(accept_helper("\"\\uFFFF\"")); + CHECK(accept_helper("\"\\u20AC\"")); + CHECK(accept_helper("\"€\"")); + CHECK(accept_helper("\"🎈\"")); + + CHECK(accept_helper("\"\\ud80c\\udc60\"")); + CHECK(accept_helper("\"\\ud83c\\udf1e\"")); + } + } + + SECTION("number") + { + SECTION("integers") + { + SECTION("without exponent") + { + CHECK(accept_helper("-128")); + CHECK(accept_helper("-0")); + CHECK(accept_helper("0")); + CHECK(accept_helper("128")); + } + + SECTION("with exponent") + { + CHECK(accept_helper("0e1")); + CHECK(accept_helper("0E1")); + + CHECK(accept_helper("10000E-4")); + CHECK(accept_helper("10000E-3")); + CHECK(accept_helper("10000E-2")); + CHECK(accept_helper("10000E-1")); + CHECK(accept_helper("10000E0")); + CHECK(accept_helper("10000E1")); + CHECK(accept_helper("10000E2")); + CHECK(accept_helper("10000E3")); + CHECK(accept_helper("10000E4")); + + CHECK(accept_helper("10000e-4")); + CHECK(accept_helper("10000e-3")); + CHECK(accept_helper("10000e-2")); + CHECK(accept_helper("10000e-1")); + CHECK(accept_helper("10000e0")); + CHECK(accept_helper("10000e1")); + CHECK(accept_helper("10000e2")); + CHECK(accept_helper("10000e3")); + CHECK(accept_helper("10000e4")); + + CHECK(accept_helper("-0e1")); + CHECK(accept_helper("-0E1")); + CHECK(accept_helper("-0E123")); + } + + SECTION("edge cases") + { + // From RFC8259, Section 6: + // Note that when such software is used, numbers that are + // integers and are in the range [-(2**53)+1, (2**53)-1] + // are interoperable in the sense that implementations will + // agree exactly on their numeric values. 
+ + // -(2**53)+1 + CHECK(accept_helper("-9007199254740991")); + // (2**53)-1 + CHECK(accept_helper("9007199254740991")); + } + + SECTION("over the edge cases") // issue #178 - Integer conversion to unsigned (incorrect handling of 64-bit integers) + { + // While RFC8259, Section 6 specifies a preference for support + // for ranges in range of IEEE 754-2008 binary64 (double precision) + // this does not accommodate 64 bit integers without loss of accuracy. + // As 64 bit integers are now widely used in software, it is desirable + // to expand support to the full 64 bit (signed and unsigned) range + // i.e. -(2**63) -> (2**64)-1. + + // -(2**63) ** Note: compilers see negative literals as negated positive numbers (hence the -1)) + CHECK(accept_helper("-9223372036854775808")); + // (2**63)-1 + CHECK(accept_helper("9223372036854775807")); + // (2**64)-1 + CHECK(accept_helper("18446744073709551615")); + } + } + + SECTION("floating-point") + { + SECTION("without exponent") + { + CHECK(accept_helper("-128.5")); + CHECK(accept_helper("0.999")); + CHECK(accept_helper("128.5")); + CHECK(accept_helper("-0.0")); + } + + SECTION("with exponent") + { + CHECK(accept_helper("-128.5E3")); + CHECK(accept_helper("-128.5E-3")); + CHECK(accept_helper("-0.0e1")); + CHECK(accept_helper("-0.0E1")); + } + } + + SECTION("overflow") + { + // overflows during parsing + CHECK(!accept_helper("1.18973e+4932")); + } + + SECTION("invalid numbers") + { + CHECK(accept_helper("01") == false); + CHECK(accept_helper("--1") == false); + CHECK(accept_helper("1.") == false); + CHECK(accept_helper("1E") == false); + CHECK(accept_helper("1E-") == false); + CHECK(accept_helper("1.E1") == false); + CHECK(accept_helper("-1E") == false); + CHECK(accept_helper("-0E#") == false); + CHECK(accept_helper("-0E-#") == false); + CHECK(accept_helper("-0#") == false); + CHECK(accept_helper("-0.0:") == false); + CHECK(accept_helper("-0.0Z") == false); + CHECK(accept_helper("-0E123:") == false); + CHECK(accept_helper("-0e0-:") == false); + CHECK(accept_helper("-0e-:") == false); + CHECK(accept_helper("-0f") == false); + + // numbers must not begin with "+" + CHECK(accept_helper("+1") == false); + CHECK(accept_helper("+0") == false); + } + } + } + + SECTION("parse errors") + { + // unexpected end of number + CHECK_THROWS_WITH_AS(parser_helper("0."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '0.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '-'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("--"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '--'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0."), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected digit after '.'; last read: '-0.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-."), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '-.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-:"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax 
error while parsing value - invalid number; expected digit after '-'; last read: '-:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("0.:"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '0.:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("e."), + "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - invalid literal; last read: 'e'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1e."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1e.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1e/"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1e/'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1e:"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1e:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E/"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E/'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E:"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E:'", json::parse_error&); + + // unexpected end of null + CHECK_THROWS_WITH_AS(parser_helper("n"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid literal; last read: 'n'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nu"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: 'nu'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nul"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'nul'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nulk"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'nulk'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nulm"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'nulm'", json::parse_error&); + + // unexpected end of true + CHECK_THROWS_WITH_AS(parser_helper("t"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid literal; last read: 't'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("tr"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: 'tr'", json::parse_error&); + 
CHECK_THROWS_WITH_AS(parser_helper("tru"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'tru'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("trud"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'trud'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("truf"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'truf'", json::parse_error&); + + // unexpected end of false + CHECK_THROWS_WITH_AS(parser_helper("f"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid literal; last read: 'f'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("fa"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: 'fa'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("fal"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'fal'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("fals"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: 'fals'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("falsd"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: 'falsd'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("falsf"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: 'falsf'", json::parse_error&); + + // missing/unexpected end of array + CHECK_THROWS_WITH_AS(parser_helper("["), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("[1"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing array - unexpected end of input; expected ']'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("[1,"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("[1,]"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - unexpected ']'; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("]"), + "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - unexpected ']'; expected '[', '{', or a literal", json::parse_error&); + + // missing/unexpected end of object + CHECK_THROWS_WITH_AS(parser_helper("{"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing object key - unexpected end of input; expected string literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("{\"foo\""), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing object separator - unexpected end of input; expected ':'", json::parse_error&); + 
CHECK_THROWS_WITH_AS(parser_helper("{\"foo\":"), + "[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("{\"foo\":}"), + "[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - unexpected '}'; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("{\"foo\":1,}"), + "[json.exception.parse_error.101] parse error at line 1, column 10: syntax error while parsing object key - unexpected '}'; expected string literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("}"), + "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - unexpected '}'; expected '[', '{', or a literal", json::parse_error&); + + // missing/unexpected end of string + CHECK_THROWS_WITH_AS(parser_helper("\""), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: missing closing quote; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\\""), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: missing closing quote; last read: '\"\\\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u\""), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u0\""), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u0\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u01\""), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u01\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u012\""), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u012\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u0"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u0'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u01"), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u01'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u012"), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u012'", json::parse_error&); + + // invalid escapes + for (int c = 1; c < 128; ++c) + { + auto s = std::string("\"\\") + std::string(1, 
static_cast(c)) + "\""; + + switch (c) + { + // valid escapes + case ('"'): + case ('\\'): + case ('/'): + case ('b'): + case ('f'): + case ('n'): + case ('r'): + case ('t'): + { + CHECK_NOTHROW(parser_helper(s)); + break; + } + + // \u must be followed with four numbers, so we skip it here + case ('u'): + { + break; + } + + // any other combination of backslash and character is invalid + default: + { + CHECK_THROWS_AS(parser_helper(s), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid string: forbidden character after backslash; last read: '\"\\" + std::string(1, static_cast(c)) + "'"); + } + break; + } + } + } + + // invalid \uxxxx escapes + { + // check whether character is a valid hex character + const auto valid = [](int c) + { + switch (c) + { + case ('0'): + case ('1'): + case ('2'): + case ('3'): + case ('4'): + case ('5'): + case ('6'): + case ('7'): + case ('8'): + case ('9'): + case ('a'): + case ('b'): + case ('c'): + case ('d'): + case ('e'): + case ('f'): + case ('A'): + case ('B'): + case ('C'): + case ('D'): + case ('E'): + case ('F'): + { + return true; + } + + default: + { + return false; + } + } + }; + + for (int c = 1; c < 128; ++c) + { + std::string const s = "\"\\u"; + + // create a string with the iterated character at each position + auto s1 = s + "000" + std::string(1, static_cast(c)) + "\""; + auto s2 = s + "00" + std::string(1, static_cast(c)) + "0\""; + auto s3 = s + "0" + std::string(1, static_cast(c)) + "00\""; + auto s4 = s + std::string(1, static_cast(c)) + "000\""; + + if (valid(c)) + { + CAPTURE(s1) + CHECK_NOTHROW(parser_helper(s1)); + CAPTURE(s2) + CHECK_NOTHROW(parser_helper(s2)); + CAPTURE(s3) + CHECK_NOTHROW(parser_helper(s3)); + CAPTURE(s4) + CHECK_NOTHROW(parser_helper(s4)); + } + else + { + CAPTURE(s1) + CHECK_THROWS_AS(parser_helper(s1), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s1), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s1.substr(0, 7) + "'"); + } + + CAPTURE(s2) + CHECK_THROWS_AS(parser_helper(s2), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s2), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s2.substr(0, 6) + "'"); + } + + CAPTURE(s3) + CHECK_THROWS_AS(parser_helper(s3), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s3), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s3.substr(0, 5) + "'"); + } + + CAPTURE(s4) + CHECK_THROWS_AS(parser_helper(s4), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s4), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; 
last read: '" + s4.substr(0, 4) + "'"); + } + } + } + } + + json _; + + // missing part of a surrogate pair + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\""), "[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\"'", json::parse_error&); + // invalid surrogate pair + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\\uD80C\""), + "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\uD80C'", json::parse_error&); + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\\u0000\""), + "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\u0000'", json::parse_error&); + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\\uFFFF\""), + "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\uFFFF'", json::parse_error&); + } + + SECTION("parse errors (accept)") + { + // unexpected end of number + CHECK(accept_helper("0.") == false); + CHECK(accept_helper("-") == false); + CHECK(accept_helper("--") == false); + CHECK(accept_helper("-0.") == false); + CHECK(accept_helper("-.") == false); + CHECK(accept_helper("-:") == false); + CHECK(accept_helper("0.:") == false); + CHECK(accept_helper("e.") == false); + CHECK(accept_helper("1e.") == false); + CHECK(accept_helper("1e/") == false); + CHECK(accept_helper("1e:") == false); + CHECK(accept_helper("1E.") == false); + CHECK(accept_helper("1E/") == false); + CHECK(accept_helper("1E:") == false); + + // unexpected end of null + CHECK(accept_helper("n") == false); + CHECK(accept_helper("nu") == false); + CHECK(accept_helper("nul") == false); + + // unexpected end of true + CHECK(accept_helper("t") == false); + CHECK(accept_helper("tr") == false); + CHECK(accept_helper("tru") == false); + + // unexpected end of false + CHECK(accept_helper("f") == false); + CHECK(accept_helper("fa") == false); + CHECK(accept_helper("fal") == false); + CHECK(accept_helper("fals") == false); + + // missing/unexpected end of array + CHECK(accept_helper("[") == false); + CHECK(accept_helper("[1") == false); + CHECK(accept_helper("[1,") == false); + CHECK(accept_helper("[1,]") == false); + CHECK(accept_helper("]") == false); + + // missing/unexpected end of object + CHECK(accept_helper("{") == false); + CHECK(accept_helper("{\"foo\"") == false); + CHECK(accept_helper("{\"foo\":") == false); + CHECK(accept_helper("{\"foo\":}") == false); + CHECK(accept_helper("{\"foo\":1,}") == false); + CHECK(accept_helper("}") == false); + + // missing/unexpected end of string + CHECK(accept_helper("\"") == false); + CHECK(accept_helper("\"\\\"") == false); + CHECK(accept_helper("\"\\u\"") == false); + CHECK(accept_helper("\"\\u0\"") == false); + CHECK(accept_helper("\"\\u01\"") == false); + CHECK(accept_helper("\"\\u012\"") == false); + CHECK(accept_helper("\"\\u") == false); + CHECK(accept_helper("\"\\u0") == false); + CHECK(accept_helper("\"\\u01") == false); + CHECK(accept_helper("\"\\u012") == false); + + // unget of newline + CHECK(parser_helper("\n123\n") == 123); + + // invalid escapes + for (int c = 1; c < 128; 
++c) + { + auto s = std::string("\"\\") + std::string(1, static_cast(c)) + "\""; + + switch (c) + { + // valid escapes + case ('"'): + case ('\\'): + case ('/'): + case ('b'): + case ('f'): + case ('n'): + case ('r'): + case ('t'): + { + CHECK(json::parser(nlohmann::detail::input_adapter(s)).accept()); + break; + } + + // \u must be followed with four numbers, so we skip it here + case ('u'): + { + break; + } + + // any other combination of backslash and character is invalid + default: + { + CHECK(json::parser(nlohmann::detail::input_adapter(s)).accept() == false); + break; + } + } + } + + // invalid \uxxxx escapes + { + // check whether character is a valid hex character + const auto valid = [](int c) + { + switch (c) + { + case ('0'): + case ('1'): + case ('2'): + case ('3'): + case ('4'): + case ('5'): + case ('6'): + case ('7'): + case ('8'): + case ('9'): + case ('a'): + case ('b'): + case ('c'): + case ('d'): + case ('e'): + case ('f'): + case ('A'): + case ('B'): + case ('C'): + case ('D'): + case ('E'): + case ('F'): + { + return true; + } + + default: + { + return false; + } + } + }; + + for (int c = 1; c < 128; ++c) + { + std::string const s = "\"\\u"; + + // create a string with the iterated character at each position + const auto s1 = s + "000" + std::string(1, static_cast(c)) + "\""; + const auto s2 = s + "00" + std::string(1, static_cast(c)) + "0\""; + const auto s3 = s + "0" + std::string(1, static_cast(c)) + "00\""; + const auto s4 = s + std::string(1, static_cast(c)) + "000\""; + + if (valid(c)) + { + CAPTURE(s1) + CHECK(json::parser(nlohmann::detail::input_adapter(s1)).accept()); + CAPTURE(s2) + CHECK(json::parser(nlohmann::detail::input_adapter(s2)).accept()); + CAPTURE(s3) + CHECK(json::parser(nlohmann::detail::input_adapter(s3)).accept()); + CAPTURE(s4) + CHECK(json::parser(nlohmann::detail::input_adapter(s4)).accept()); + } + else + { + CAPTURE(s1) + CHECK(json::parser(nlohmann::detail::input_adapter(s1)).accept() == false); + + CAPTURE(s2) + CHECK(json::parser(nlohmann::detail::input_adapter(s2)).accept() == false); + + CAPTURE(s3) + CHECK(json::parser(nlohmann::detail::input_adapter(s3)).accept() == false); + + CAPTURE(s4) + CHECK(json::parser(nlohmann::detail::input_adapter(s4)).accept() == false); + } + } + } + + // missing part of a surrogate pair + CHECK(accept_helper("\"\\uD80C\"") == false); + // invalid surrogate pair + CHECK(accept_helper("\"\\uD80C\\uD80C\"") == false); + CHECK(accept_helper("\"\\uD80C\\u0000\"") == false); + CHECK(accept_helper("\"\\uD80C\\uFFFF\"") == false); + } + + SECTION("tests found by mutate++") + { + // test case to make sure no comma precedes the first key + CHECK_THROWS_WITH_AS(parser_helper("{,\"key\": false}"), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing object key - unexpected ','; expected string literal", json::parse_error&); + // test case to make sure an object is properly closed + CHECK_THROWS_WITH_AS(parser_helper("[{\"key\": false true]"), "[json.exception.parse_error.101] parse error at line 1, column 19: syntax error while parsing object - unexpected true literal; expected '}'", json::parse_error&); + + // test case to make sure the callback is properly evaluated after reading a key + { + json::parser_callback_t const cb = [](int /*unused*/, json::parse_event_t event, json& /*unused*/) noexcept + { + return event != json::parse_event_t::key; + }; + + json x = json::parse("{\"key\": false}", cb); + CHECK(x == json::object()); + } + } + + SECTION("callback function") + { 
+ const auto* s_object = R"( + { + "foo": 2, + "bar": { + "baz": 1 + } + } + )"; + + const auto* s_array = R"( + [1,2,[3,4,5],4,5] + )"; + + const auto* structured_array = R"( + [ + 1, + { + "foo": "bar" + }, + { + "qux": "baz" + } + ] + )"; + + SECTION("filter nothing") + { + json j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return true; + }); + + CHECK (j_object == json({{"foo", 2}, {"bar", {{"baz", 1}}}})); + + json j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return true; + }); + + CHECK (j_array == json({1, 2, {3, 4, 5}, 4, 5})); + } + + SECTION("filter everything") + { + json const j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return false; + }); + + // the top-level object will be discarded, leaving a null + CHECK (j_object.is_null()); + + json const j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return false; + }); + + // the top-level array will be discarded, leaving a null + CHECK (j_array.is_null()); + } + + SECTION("filter specific element") + { + json j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t event, const json & j) noexcept + { + // filter all number(2) elements + return event != json::parse_event_t::value || j != json(2); + }); + + CHECK (j_object == json({{"bar", {{"baz", 1}}}})); + + json j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t event, const json & j) noexcept + { + return event != json::parse_event_t::value || j != json(2); + }); + + CHECK (j_array == json({1, {3, 4, 5}, 4, 5})); + } + + SECTION("filter object in array") + { + json j_filtered1 = json::parse(structured_array, [](int /*unused*/, json::parse_event_t e, const json & parsed) + { + return !(e == json::parse_event_t::object_end && parsed.contains("foo")); + }); + + // the specified object will be discarded, and removed. + CHECK (j_filtered1.size() == 2); + CHECK (j_filtered1 == json({1, {{"qux", "baz"}}})); + + json j_filtered2 = json::parse(structured_array, [](int /*unused*/, json::parse_event_t e, const json& /*parsed*/) noexcept + { + return e != json::parse_event_t::object_end; + }); + + // removed all objects in array. 
+ CHECK (j_filtered2.size() == 1); + CHECK (j_filtered2 == json({1})); + } + + SECTION("filter specific events") + { + SECTION("first closing event") + { + { + json j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + static bool first = true; + if (e == json::parse_event_t::object_end && first) + { + first = false; + return false; + } + + return true; + }); + + // the first completed object will be discarded + CHECK (j_object == json({{"foo", 2}})); + } + + { + json j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + static bool first = true; + if (e == json::parse_event_t::array_end && first) + { + first = false; + return false; + } + + return true; + }); + + // the first completed array will be discarded + CHECK (j_array == json({1, 2, 4, 5})); + } + } + } + + SECTION("special cases") + { + // the following test cases cover the situation in which an empty + // object and array is discarded only after the closing character + // has been read + + json j_empty_object = json::parse("{}", [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + return e != json::parse_event_t::object_end; + }); + CHECK(j_empty_object == json()); + + json j_empty_array = json::parse("[]", [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + return e != json::parse_event_t::array_end; + }); + CHECK(j_empty_array == json()); + } + } + + SECTION("constructing from contiguous containers") + { + SECTION("from std::vector") + { + std::vector v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from std::array") + { + std::array v { {'t', 'r', 'u', 'e'} }; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from array") + { + uint8_t v[] = {'t', 'r', 'u', 'e'}; // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from char literal") + { + CHECK(parser_helper("true") == json(true)); + } + + SECTION("from std::string") + { + std::string v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from std::initializer_list") + { + std::initializer_list const v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from std::valarray") + { + std::valarray v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + } + + SECTION("improve test coverage") + { + SECTION("parser with callback") + { + json::parser_callback_t const cb = [](int /*unused*/, json::parse_event_t /*unused*/, json& /*unused*/) noexcept + { + return true; + }; + + CHECK(json::parse("{\"foo\": true:", cb, false).is_discarded()); + + json _; + CHECK_THROWS_WITH_AS(_ = json::parse("{\"foo\": true:", cb), "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing object - unexpected ':'; expected '}'", json::parse_error&); + + 
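+ // (Note: 1.18973e+4932 exceeds the largest finite double, roughly 1.7977e+308, so even when a callback is supplied the parser reports out_of_range.406 (number overflow) rather than parse_error.101, as the following check expects.)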
CHECK_THROWS_WITH_AS(_ = json::parse("1.18973e+4932", cb), "[json.exception.out_of_range.406] number overflow parsing '1.18973e+4932'", json::out_of_range&); + } + + SECTION("SAX parser") + { + SECTION("} without value") + { + SaxCountdown s(1); + CHECK(json::sax_parse("{}", &s) == false); + } + + SECTION("} with value") + { + SaxCountdown s(3); + CHECK(json::sax_parse("{\"k1\": true}", &s) == false); + } + + SECTION("second key") + { + SaxCountdown s(3); + CHECK(json::sax_parse("{\"k1\": true, \"k2\": false}", &s) == false); + } + + SECTION("] without value") + { + SaxCountdown s(1); + CHECK(json::sax_parse("[]", &s) == false); + } + + SECTION("] with value") + { + SaxCountdown s(2); + CHECK(json::sax_parse("[1]", &s) == false); + } + + SECTION("float") + { + SaxCountdown s(0); + CHECK(json::sax_parse("3.14", &s) == false); + } + + SECTION("false") + { + SaxCountdown s(0); + CHECK(json::sax_parse("false", &s) == false); + } + + SECTION("null") + { + SaxCountdown s(0); + CHECK(json::sax_parse("null", &s) == false); + } + + SECTION("true") + { + SaxCountdown s(0); + CHECK(json::sax_parse("true", &s) == false); + } + + SECTION("unsigned") + { + SaxCountdown s(0); + CHECK(json::sax_parse("12", &s) == false); + } + + SECTION("integer") + { + SaxCountdown s(0); + CHECK(json::sax_parse("-12", &s) == false); + } + + SECTION("string") + { + SaxCountdown s(0); + CHECK(json::sax_parse("\"foo\"", &s) == false); + } + } + } + + SECTION("error messages for comments") + { + json _; + CHECK_THROWS_WITH_AS(_ = json::parse("/a", nullptr, true, true), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid comment; expecting '/' or '*' after '/'; last read: '/a'", json::parse_error); + CHECK_THROWS_WITH_AS(_ = json::parse("/*", nullptr, true, true), "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid comment; missing closing '*/'; last read: '/*'", json::parse_error); + } + } + + ```` + + +- `` + + ??? "Click to view reference" + + `gitlab.com` + + +{% endraw %} + +**Fallacies:** + +_None_ diff --git a/docs/trustable/trudag/dashboard.md b/docs/trustable/trudag/dashboard.md new file mode 100644 index 0000000..e20b6c1 --- /dev/null +++ b/docs/trustable/trudag/dashboard.md @@ -0,0 +1,91 @@ +# Dashboard +## Evidence Score Distribution + +The distribution of scores for evidence nodes across the graph. + +![No Image](figs/evidence_hist.svg) + +??? example "click to view figure as table" + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |16| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |0| +## Expectations Score Distribution + +The distribution of scores for expectations nodes across the graph. + +![No Image](figs/expectations_hist.svg) + +??? 
example "click to view figure as table" + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |1| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |0| +## All Score Distribution + +The distribution of scores for all nodes across the graph. + +![No Image](figs/all_hist.svg) + +??? example "click to view figure as table" + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |24| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |0| +## Lennart SME Score Distribution + +![No Image](figs/Lennart_hist.svg) + +??? example "click to view figure as table" + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |0| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |1| +## Summary + + +| Category | Count | +|----------|-------| +|statements|24| +|reviewed statements|23| +|unreviewed statements|1| +|orphaned statements|0| +|statements with evidence|7| +|evidence|16| +|expectations|1| diff --git a/docs/trustable/trudag/figs/Lennart_hist.svg b/docs/trustable/trudag/figs/Lennart_hist.svg new file mode 100644 index 0000000..87c1c44 --- /dev/null +++ b/docs/trustable/trudag/figs/Lennart_hist.svg @@ -0,0 +1,708 @@ + + + + + + + + 2025-07-07T15:56:06.245421 + image/svg+xml + + + Matplotlib v3.10.3, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/trustable/trudag/figs/all_hist.svg 
b/docs/trustable/trudag/figs/all_hist.svg new file mode 100644 index 0000000..9f49f9c --- /dev/null +++ b/docs/trustable/trudag/figs/all_hist.svg @@ -0,0 +1,727 @@ + + + + + + + + 2025-07-07T15:56:06.144785 + image/svg+xml + + + Matplotlib v3.10.3, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/trustable/trudag/figs/evidence_hist.svg b/docs/trustable/trudag/figs/evidence_hist.svg new file mode 100644 index 0000000..68fb69b --- /dev/null +++ b/docs/trustable/trudag/figs/evidence_hist.svg @@ -0,0 +1,739 @@ + + + + + + + + 2025-07-07T15:56:05.882998 + image/svg+xml + + + Matplotlib v3.10.3, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/trustable/trudag/figs/expectations_hist.svg b/docs/trustable/trudag/figs/expectations_hist.svg new file mode 100644 index 0000000..e1c10b1 --- /dev/null +++ b/docs/trustable/trudag/figs/expectations_hist.svg @@ -0,0 +1,708 @@ + + + + + + + + 2025-07-07T15:56:05.982779 + image/svg+xml + + + Matplotlib v3.10.3, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/trustable/trudag/m2r2_test/TA.rst b/docs/trustable/trudag/m2r2_test/TA.rst new file mode 100644 index 0000000..4c9c844 --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/TA.rst @@ -0,0 +1,532 @@ +TA +== + + +---- + + + +.. _ta-analysis: + +TA-ANALYSIS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Collected data from tests and monitoring of deployed software is analysed according to specified objectives. 
+{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-results`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-behaviours: + +TA-BEHAVIOURS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Expected or required behaviours for JSON-Library are identified, specified, verified and validated based on analysis. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-expectations`\ + +**Supporting Items:** + + +* :ref:`wfj-01`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-confidence: + +TA-CONFIDENCE +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Confidence in JSON-Library is measured based on results of analysis. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-confidence`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-constraints: + +TA-CONSTRAINTS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Constraints on adaptation and deployment of JSON-Library are specified. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-expectations`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-data: + +TA-DATA +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Data is collected from tests, and from monitoring of deployed software, according to specified objectives. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-results`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-fixes: + +TA-FIXES +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Known bugs or misbehaviours are analysed and triaged, and critical fixes or mitigations are implemented or applied. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-changes`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-indicators: + +TA-INDICATORS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Advance warning indicators for misbehaviours are identified, and monitoring mechanisms are specified, verified and validated based on analysis. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-expectations`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-inputs: + +TA-INPUTS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All inputs to JSON-Library are assessed, to identify potential risks and issues. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-provenance`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. 
_ta-iterations: + +TA-ITERATIONS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All constructed iterations of JSON-Library include source code, build instructions, tests, results and attestations. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-construction`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-methodologies: + +TA-METHODOLOGIES +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Manual methodologies applied for JSON-Library by contributors, and their results, are managed according to specified objectives. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-confidence`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-misbehaviours: + +TA-MISBEHAVIOURS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Prohibited misbehaviours for JSON-Library are identified, and mitigations are specified, verified and validated based on analysis. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-expectations`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-releases: + +TA-RELEASES +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Construction of JSON-Library releases is fully repeatable and the results are fully reproducible, with any exceptions documented and justified. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-construction`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-supply_chain: + +TA-SUPPLY_CHAIN +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All sources for JSON-Library and tools are mirrored in our controlled environment. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-provenance`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-tests: + +TA-TESTS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All tests for JSON-Library, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-construction`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _ta-updates: + +TA-UPDATES +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +JSON-Library components, configurations and tools are updated under specified change and configuration management controls. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-changes`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. 
_ta-validation: + +TA-VALIDATION +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All specified tests are executed repeatedly, under defined conditions in controlled environments, according to specified objectives. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`tt-results`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* diff --git a/docs/trustable/trudag/m2r2_test/TRUSTABLE.rst b/docs/trustable/trudag/m2r2_test/TRUSTABLE.rst new file mode 100644 index 0000000..50060bf --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/TRUSTABLE.rst @@ -0,0 +1,39 @@ +TRUSTABLE +========= + + +---- + + + +.. _trustable-software: + +TRUSTABLE-SOFTWARE +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This release of JSON-Library is Trustable. +{: .expanded-item-element } + +**Supported Requests:** + +**Supporting Items:** + + +* :ref:`tt-changes`\ +* :ref:`tt-confidence`\ +* :ref:`tt-construction`\ +* :ref:`tt-expectations`\ +* :ref:`tt-provenance`\ +* :ref:`tt-results`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* diff --git a/docs/trustable/trudag/m2r2_test/TT.rst b/docs/trustable/trudag/m2r2_test/TT.rst new file mode 100644 index 0000000..deb3456 --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/TT.rst @@ -0,0 +1,217 @@ +TT +== + + +---- + + + +.. _tt-changes: + +TT-CHANGES +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +JSON-Library is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`trustable-software`\ + +**Supporting Items:** + + +* :ref:`ta-fixes`\ +* :ref:`ta-updates`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _tt-confidence: + +TT-CONFIDENCE +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Confidence in JSON-Library is achieved by measuring and analysing behaviour and evidence over time. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`trustable-software`\ + +**Supporting Items:** + + +* :ref:`ta-confidence`\ +* :ref:`ta-methodologies`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _tt-construction: + +TT-CONSTRUCTION +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Tools are provided to build JSON-Library from trusted sources (also provided) with full reproducibility. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`trustable-software`\ + +**Supporting Items:** + + +* :ref:`ta-iterations`\ +* :ref:`ta-releases`\ +* :ref:`ta-tests`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _tt-expectations: + +TT-EXPECTATIONS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Documentation is provided, specifying what JSON-Library is expected to do, and what it must not do, and how this is verified. 
+{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`trustable-software`\ + +**Supporting Items:** + + +* :ref:`ta-behaviours`\ +* :ref:`ta-constraints`\ +* :ref:`ta-indicators`\ +* :ref:`ta-misbehaviours`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _tt-provenance: + +TT-PROVENANCE +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All inputs (and attestations for claims) for JSON-Library are provided with known provenance. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`trustable-software`\ + +**Supporting Items:** + + +* :ref:`ta-inputs`\ +* :ref:`ta-supply_chain`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* + +---- + + + +.. _tt-results: + +TT-RESULTS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Evidence is provided to demonstrate that JSON-Library does what it is supposed to do, and does not do what it must not do. +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`trustable-software`\ + +**Supporting Items:** + + +* :ref:`ta-analysis`\ +* :ref:`ta-data`\ +* :ref:`ta-validation`\ + +{% raw %} + +**References:** + +*None* + +{% endraw %} + +**Fallacies:** + +*None* diff --git a/docs/trustable/trudag/m2r2_test/WFJ.rst b/docs/trustable/trudag/m2r2_test/WFJ.rst new file mode 100644 index 0000000..e1ea8f3 --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/WFJ.rst @@ -0,0 +1,1747 @@ +WFJ +=== + + +---- + + + +.. _wfj-01: + +WFJ-01 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The service checks for the four primitive types (strings, numbers, booleans, null). +{: .expanded-item-element } + +**Supported Requests:** + + +* :ref:`ta-behaviours`\ + +**Supporting Items:** + +*None* + +{% raw %} + +**References:** + + +* + ``nlohmann_json/tests/src/unit-class_parser.cpp`` + + ??? "Click to view reference" + + .. code-block:: + + ````cpp + // __ _____ _____ _____ + // __| | __| | | | JSON for Modern C++ (supporting code) + // | | |__ | | | | | | version 3.12.0 + // |_____|_____|_____|_|___| https://github.com/nlohmann/json + // + // SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann + // SPDX-License-Identifier: MIT + + #include "doctest_compatibility.h" + + #define JSON_TESTS_PRIVATE + #include + using nlohmann::json; + #ifdef JSON_TEST_NO_GLOBAL_UDLS + using namespace nlohmann::literals; // NOLINT(google-build-using-namespace) + #endif + + #include + + namespace + { + class SaxEventLogger + { + public: + bool null() + { + events.emplace_back("null()"); + return true; + } + + bool boolean(bool val) + { + events.emplace_back(val ? 
"boolean(true)" : "boolean(false)"); + return true; + } + + bool number_integer(json::number_integer_t val) + { + events.push_back("number_integer(" + std::to_string(val) + ")"); + return true; + } + + bool number_unsigned(json::number_unsigned_t val) + { + events.push_back("number_unsigned(" + std::to_string(val) + ")"); + return true; + } + + bool number_float(json::number_float_t /*unused*/, const std::string& s) + { + events.push_back("number_float(" + s + ")"); + return true; + } + + bool string(std::string& val) + { + events.push_back("string(" + val + ")"); + return true; + } + + bool binary(json::binary_t& val) + { + std::string binary_contents = "binary("; + std::string comma_space; + for (auto b : val) + { + binary_contents.append(comma_space); + binary_contents.append(std::to_string(static_cast(b))); + comma_space = ", "; + } + binary_contents.append(")"); + events.push_back(binary_contents); + return true; + } + + bool start_object(std::size_t elements) + { + if (elements == (std::numeric_limits::max)()) + { + events.emplace_back("start_object()"); + } + else + { + events.push_back("start_object(" + std::to_string(elements) + ")"); + } + return true; + } + + bool key(std::string& val) + { + events.push_back("key(" + val + ")"); + return true; + } + + bool end_object() + { + events.emplace_back("end_object()"); + return true; + } + + bool start_array(std::size_t elements) + { + if (elements == (std::numeric_limits::max)()) + { + events.emplace_back("start_array()"); + } + else + { + events.push_back("start_array(" + std::to_string(elements) + ")"); + } + return true; + } + + bool end_array() + { + events.emplace_back("end_array()"); + return true; + } + + bool parse_error(std::size_t position, const std::string& /*unused*/, const json::exception& /*unused*/) + { + errored = true; + events.push_back("parse_error(" + std::to_string(position) + ")"); + return false; + } + + std::vector events {}; // NOLINT(readability-redundant-member-init) + bool errored = false; + }; + + class SaxCountdown : public nlohmann::json::json_sax_t + { + public: + explicit SaxCountdown(const int count) : events_left(count) + {} + + bool null() override + { + return events_left-- > 0; + } + + bool boolean(bool /*val*/) override + { + return events_left-- > 0; + } + + bool number_integer(json::number_integer_t /*val*/) override + { + return events_left-- > 0; + } + + bool number_unsigned(json::number_unsigned_t /*val*/) override + { + return events_left-- > 0; + } + + bool number_float(json::number_float_t /*val*/, const std::string& /*s*/) override + { + return events_left-- > 0; + } + + bool string(std::string& /*val*/) override + { + return events_left-- > 0; + } + + bool binary(json::binary_t& /*val*/) override + { + return events_left-- > 0; + } + + bool start_object(std::size_t /*elements*/) override + { + return events_left-- > 0; + } + + bool key(std::string& /*val*/) override + { + return events_left-- > 0; + } + + bool end_object() override + { + return events_left-- > 0; + } + + bool start_array(std::size_t /*elements*/) override + { + return events_left-- > 0; + } + + bool end_array() override + { + return events_left-- > 0; + } + + bool parse_error(std::size_t /*position*/, const std::string& /*last_token*/, const json::exception& /*ex*/) override + { + return false; + } + + private: + int events_left = 0; + }; + + json parser_helper(const std::string& s); + bool accept_helper(const std::string& s); + void comments_helper(const std::string& s); + + json parser_helper(const std::string& s) + { 
+ json j; + json::parser(nlohmann::detail::input_adapter(s)).parse(true, j); + + // if this line was reached, no exception occurred + // -> check if result is the same without exceptions + json j_nothrow; + CHECK_NOTHROW(json::parser(nlohmann::detail::input_adapter(s), nullptr, false).parse(true, j_nothrow)); + CHECK(j_nothrow == j); + + json j_sax; + nlohmann::detail::json_sax_dom_parser sdp(j_sax); + json::sax_parse(s, &sdp); + CHECK(j_sax == j); + + comments_helper(s); + + return j; + } + + bool accept_helper(const std::string& s) + { + CAPTURE(s) + + // 1. parse s without exceptions + json j; + CHECK_NOTHROW(json::parser(nlohmann::detail::input_adapter(s), nullptr, false).parse(true, j)); + const bool ok_noexcept = !j.is_discarded(); + + // 2. accept s + const bool ok_accept = json::parser(nlohmann::detail::input_adapter(s)).accept(true); + + // 3. check if both approaches come to the same result + CHECK(ok_noexcept == ok_accept); + + // 4. parse with SAX (compare with relaxed accept result) + SaxEventLogger el; + CHECK_NOTHROW(json::sax_parse(s, &el, json::input_format_t::json, false)); + CHECK(json::parser(nlohmann::detail::input_adapter(s)).accept(false) == !el.errored); + + // 5. parse with simple callback + json::parser_callback_t const cb = [](int /*unused*/, json::parse_event_t /*unused*/, json& /*unused*/) noexcept + { + return true; + }; + json const j_cb = json::parse(s, cb, false); + const bool ok_noexcept_cb = !j_cb.is_discarded(); + + // 6. check if this approach came to the same result + CHECK(ok_noexcept == ok_noexcept_cb); + + // 7. check if comments are properly ignored + if (ok_accept) + { + comments_helper(s); + } + + // 8. return result + return ok_accept; + } + + void comments_helper(const std::string& s) + { + json _; + + // parse/accept with default parser + CHECK_NOTHROW(_ = json::parse(s)); + CHECK(json::accept(s)); + + // parse/accept while skipping comments + CHECK_NOTHROW(_ = json::parse(s, nullptr, false, true)); + CHECK(json::accept(s, true)); + + std::vector json_with_comments; + + // start with a comment + json_with_comments.push_back(std::string("// this is a comment\n") + s); + json_with_comments.push_back(std::string("/* this is a comment */") + s); + // end with a comment + json_with_comments.push_back(s + "// this is a comment"); + json_with_comments.push_back(s + "/* this is a comment */"); + + // check all strings + for (const auto& json_with_comment : json_with_comments) + { + CAPTURE(json_with_comment) + CHECK_THROWS_AS(_ = json::parse(json_with_comment), json::parse_error); + CHECK(!json::accept(json_with_comment)); + + CHECK_NOTHROW(_ = json::parse(json_with_comment, nullptr, true, true)); + CHECK(json::accept(json_with_comment, true)); + } + } + + } // namespace + + TEST_CASE("parser class") + { + SECTION("parse") + { + SECTION("null") + { + CHECK(parser_helper("null") == json(nullptr)); + } + + SECTION("true") + { + CHECK(parser_helper("true") == json(true)); + } + + SECTION("false") + { + CHECK(parser_helper("false") == json(false)); + } + + SECTION("array") + { + SECTION("empty array") + { + CHECK(parser_helper("[]") == json(json::value_t::array)); + CHECK(parser_helper("[ ]") == json(json::value_t::array)); + } + + SECTION("nonempty array") + { + CHECK(parser_helper("[true, false, null]") == json({true, false, nullptr})); + } + } + + SECTION("object") + { + SECTION("empty object") + { + CHECK(parser_helper("{}") == json(json::value_t::object)); + CHECK(parser_helper("{ }") == json(json::value_t::object)); + } + + SECTION("nonempty 
object") + { + CHECK(parser_helper("{\"\": true, \"one\": 1, \"two\": null}") == json({{"", true}, {"one", 1}, {"two", nullptr}})); + } + } + + SECTION("string") + { + // empty string + CHECK(parser_helper("\"\"") == json(json::value_t::string)); + + SECTION("errors") + { + // error: tab in string + CHECK_THROWS_WITH_AS(parser_helper("\"\t\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0009 (HT) must be escaped to \\u0009 or \\t; last read: '\"'", json::parse_error&); + // error: newline in string + CHECK_THROWS_WITH_AS(parser_helper("\"\n\""), "[json.exception.parse_error.101] parse error at line 2, column 0: syntax error while parsing value - invalid string: control character U+000A (LF) must be escaped to \\u000A or \\n; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\r\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000D (CR) must be escaped to \\u000D or \\r; last read: '\"'", json::parse_error&); + // error: backspace in string + CHECK_THROWS_WITH_AS(parser_helper("\"\b\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0008 (BS) must be escaped to \\u0008 or \\b; last read: '\"'", json::parse_error&); + // improve code coverage + CHECK_THROWS_AS(parser_helper("\uFF01"), json::parse_error&); + CHECK_THROWS_AS(parser_helper("[-4:1,]"), json::parse_error&); + // unescaped control characters + CHECK_THROWS_WITH_AS(parser_helper("\"\x00\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: missing closing quote; last read: '\"'", json::parse_error&); // NOLINT(bugprone-string-literal-with-embedded-nul) + CHECK_THROWS_WITH_AS(parser_helper("\"\x01\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0001 (SOH) must be escaped to \\u0001; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x02\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0002 (STX) must be escaped to \\u0002; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x03\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0003 (ETX) must be escaped to \\u0003; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x04\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0004 (EOT) must be escaped to \\u0004; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x05\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0005 (ENQ) must be escaped to \\u0005; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x06\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0006 (ACK) must be escaped to \\u0006; last read: '\"'", json::parse_error&); + 
CHECK_THROWS_WITH_AS(parser_helper("\"\x07\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0007 (BEL) must be escaped to \\u0007; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x08\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0008 (BS) must be escaped to \\u0008 or \\b; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x09\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0009 (HT) must be escaped to \\u0009 or \\t; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0a\""), "[json.exception.parse_error.101] parse error at line 2, column 0: syntax error while parsing value - invalid string: control character U+000A (LF) must be escaped to \\u000A or \\n; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0b\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000B (VT) must be escaped to \\u000B; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0c\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000C (FF) must be escaped to \\u000C or \\f; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0d\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000D (CR) must be escaped to \\u000D or \\r; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0e\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000E (SO) must be escaped to \\u000E; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x0f\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+000F (SI) must be escaped to \\u000F; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x10\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0010 (DLE) must be escaped to \\u0010; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x11\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0011 (DC1) must be escaped to \\u0011; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x12\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0012 (DC2) must be escaped to \\u0012; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x13\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0013 (DC3) must be escaped to \\u0013; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x14\""), 
"[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0014 (DC4) must be escaped to \\u0014; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x15\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0015 (NAK) must be escaped to \\u0015; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x16\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0016 (SYN) must be escaped to \\u0016; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x17\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0017 (ETB) must be escaped to \\u0017; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x18\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0018 (CAN) must be escaped to \\u0018; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x19\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0019 (EM) must be escaped to \\u0019; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1a\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001A (SUB) must be escaped to \\u001A; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1b\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001B (ESC) must be escaped to \\u001B; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1c\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001C (FS) must be escaped to \\u001C; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1d\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001D (GS) must be escaped to \\u001D; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1e\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001E (RS) must be escaped to \\u001E; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\x1f\""), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+001F (US) must be escaped to \\u001F; last read: '\"'", json::parse_error&); + + SECTION("additional test for null byte") + { + // The test above for the null byte is wrong, because passing + // a string to the parser only reads int until it encounters + // a null byte. This test inserts the null byte later on and + // uses an iterator range. 
+                    std::string s = "\"1\"";
+                    s[1] = '\0';
+                    json _;
+                    CHECK_THROWS_WITH_AS(_ = json::parse(s.begin(), s.end()), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: control character U+0000 (NUL) must be escaped to \\u0000; last read: '\"'", json::parse_error&);
+                }
+            }
+
+            SECTION("escaped")
+            {
+                // quotation mark "\""
+                auto r1 = R"("\"")"_json;
+                CHECK(parser_helper("\"\\\"\"") == r1);
+                // reverse solidus "\\"
+                auto r2 = R"("\\")"_json;
+                CHECK(parser_helper("\"\\\\\"") == r2);
+                // solidus
+                CHECK(parser_helper("\"\\/\"") == R"("/")"_json);
+                // backspace
+                CHECK(parser_helper("\"\\b\"") == json("\b"));
+                // formfeed
+                CHECK(parser_helper("\"\\f\"") == json("\f"));
+                // newline
+                CHECK(parser_helper("\"\\n\"") == json("\n"));
+                // carriage return
+                CHECK(parser_helper("\"\\r\"") == json("\r"));
+                // horizontal tab
+                CHECK(parser_helper("\"\\t\"") == json("\t"));
+
+                CHECK(parser_helper("\"\\u0001\"").get<std::string>() == "\x01");
+                CHECK(parser_helper("\"\\u000a\"").get<std::string>() == "\n");
+                CHECK(parser_helper("\"\\u00b0\"").get<std::string>() == "°");
+                CHECK(parser_helper("\"\\u0c00\"").get<std::string>() == "ఀ");
+                CHECK(parser_helper("\"\\ud000\"").get<std::string>() == "퀀");
+                CHECK(parser_helper("\"\\u000E\"").get<std::string>() == "\x0E");
+                CHECK(parser_helper("\"\\u00F0\"").get<std::string>() == "ð");
+                CHECK(parser_helper("\"\\u0100\"").get<std::string>() == "Ā");
+                CHECK(parser_helper("\"\\u2000\"").get<std::string>() == " ");
+                CHECK(parser_helper("\"\\uFFFF\"").get<std::string>() == "￿");
+                CHECK(parser_helper("\"\\u20AC\"").get<std::string>() == "€");
+                CHECK(parser_helper("\"€\"").get<std::string>() == "€");
+                CHECK(parser_helper("\"🎈\"").get<std::string>() == "🎈");
+
+                CHECK(parser_helper("\"\\ud80c\\udc60\"").get<std::string>() == "\xf0\x93\x81\xa0");
+                CHECK(parser_helper("\"\\ud83c\\udf1e\"").get<std::string>() == "🌞");
+            }
+        }
+
+        SECTION("number")
+        {
+            SECTION("integers")
+            {
+                SECTION("without exponent")
+                {
+                    CHECK(parser_helper("-128") == json(-128));
+                    CHECK(parser_helper("-0") == json(-0));
+                    CHECK(parser_helper("0") == json(0));
+                    CHECK(parser_helper("128") == json(128));
+                }
+
+                SECTION("with exponent")
+                {
+                    CHECK(parser_helper("0e1") == json(0e1));
+                    CHECK(parser_helper("0E1") == json(0e1));
+
+                    CHECK(parser_helper("10000E-4") == json(10000e-4));
+                    CHECK(parser_helper("10000E-3") == json(10000e-3));
+                    CHECK(parser_helper("10000E-2") == json(10000e-2));
+                    CHECK(parser_helper("10000E-1") == json(10000e-1));
+                    CHECK(parser_helper("10000E0") == json(10000e0));
+                    CHECK(parser_helper("10000E1") == json(10000e1));
+                    CHECK(parser_helper("10000E2") == json(10000e2));
+                    CHECK(parser_helper("10000E3") == json(10000e3));
+                    CHECK(parser_helper("10000E4") == json(10000e4));
+
+                    CHECK(parser_helper("10000e-4") == json(10000e-4));
+                    CHECK(parser_helper("10000e-3") == json(10000e-3));
+                    CHECK(parser_helper("10000e-2") == json(10000e-2));
+                    CHECK(parser_helper("10000e-1") == json(10000e-1));
+                    CHECK(parser_helper("10000e0") == json(10000e0));
+                    CHECK(parser_helper("10000e1") == json(10000e1));
+                    CHECK(parser_helper("10000e2") == json(10000e2));
+                    CHECK(parser_helper("10000e3") == json(10000e3));
+                    CHECK(parser_helper("10000e4") == json(10000e4));
+
+                    CHECK(parser_helper("-0e1") == json(-0e1));
+                    CHECK(parser_helper("-0E1") == json(-0e1));
+                    CHECK(parser_helper("-0E123") == json(-0e123));
+
+                    // numbers after exponent
+                    CHECK(parser_helper("10E0") == json(10e0));
+                    CHECK(parser_helper("10E1") == json(10e1));
+                    CHECK(parser_helper("10E2") == json(10e2));
+                    CHECK(parser_helper("10E3") == json(10e3));
+                    CHECK(parser_helper("10E4") == json(10e4));
+                    CHECK(parser_helper("10E5") == json(10e5));
+                    CHECK(parser_helper("10E6") == json(10e6));
+                    CHECK(parser_helper("10E7") == json(10e7));
+                    CHECK(parser_helper("10E8") == json(10e8));
+                    CHECK(parser_helper("10E9") == json(10e9));
+                    CHECK(parser_helper("10E+0") == json(10e0));
+                    CHECK(parser_helper("10E+1") == json(10e1));
+                    CHECK(parser_helper("10E+2") == json(10e2));
+                    CHECK(parser_helper("10E+3") == json(10e3));
+                    CHECK(parser_helper("10E+4") == json(10e4));
+                    CHECK(parser_helper("10E+5") == json(10e5));
+                    CHECK(parser_helper("10E+6") == json(10e6));
+                    CHECK(parser_helper("10E+7") == json(10e7));
+                    CHECK(parser_helper("10E+8") == json(10e8));
+                    CHECK(parser_helper("10E+9") == json(10e9));
+                    CHECK(parser_helper("10E-1") == json(10e-1));
+                    CHECK(parser_helper("10E-2") == json(10e-2));
+                    CHECK(parser_helper("10E-3") == json(10e-3));
+                    CHECK(parser_helper("10E-4") == json(10e-4));
+                    CHECK(parser_helper("10E-5") == json(10e-5));
+                    CHECK(parser_helper("10E-6") == json(10e-6));
+                    CHECK(parser_helper("10E-7") == json(10e-7));
+                    CHECK(parser_helper("10E-8") == json(10e-8));
+                    CHECK(parser_helper("10E-9") == json(10e-9));
+                }
+
+                SECTION("edge cases")
+                {
+                    // From RFC8259, Section 6:
+                    // Note that when such software is used, numbers that are
+                    // integers and are in the range [-(2**53)+1, (2**53)-1]
+                    // are interoperable in the sense that implementations will
+                    // agree exactly on their numeric values.
+
+                    // -(2**53)+1
+                    CHECK(parser_helper("-9007199254740991").get<int64_t>() == -9007199254740991);
+                    // (2**53)-1
+                    CHECK(parser_helper("9007199254740991").get<int64_t>() == 9007199254740991);
+                }
+
+                SECTION("over the edge cases") // issue #178 - Integer conversion to unsigned (incorrect handling of 64-bit integers)
+                {
+                    // While RFC8259, Section 6 specifies a preference for support
+                    // for ranges in range of IEEE 754-2008 binary64 (double precision)
+                    // this does not accommodate 64-bit integers without loss of accuracy.
+                    // As 64-bit integers are now widely used in software, it is desirable
+                    // to expand support to the full 64 bit (signed and unsigned) range
+                    // i.e. -(2**63) -> (2**64)-1.
+ + // -(2**63) ** Note: compilers see negative literals as negated positive numbers (hence the -1)) + CHECK(parser_helper("-9223372036854775808").get() == -9223372036854775807 - 1); + // (2**63)-1 + CHECK(parser_helper("9223372036854775807").get() == 9223372036854775807); + // (2**64)-1 + CHECK(parser_helper("18446744073709551615").get() == 18446744073709551615u); + } + } + + SECTION("floating-point") + { + SECTION("without exponent") + { + CHECK(parser_helper("-128.5") == json(-128.5)); + CHECK(parser_helper("0.999") == json(0.999)); + CHECK(parser_helper("128.5") == json(128.5)); + CHECK(parser_helper("-0.0") == json(-0.0)); + } + + SECTION("with exponent") + { + CHECK(parser_helper("-128.5E3") == json(-128.5E3)); + CHECK(parser_helper("-128.5E-3") == json(-128.5E-3)); + CHECK(parser_helper("-0.0e1") == json(-0.0e1)); + CHECK(parser_helper("-0.0E1") == json(-0.0e1)); + } + } + + SECTION("overflow") + { + // overflows during parsing yield an exception + CHECK_THROWS_WITH_AS(parser_helper("1.18973e+4932").empty(), "[json.exception.out_of_range.406] number overflow parsing '1.18973e+4932'", json::out_of_range&); + } + + SECTION("invalid numbers") + { + // numbers must not begin with "+" + CHECK_THROWS_AS(parser_helper("+1"), json::parse_error&); + CHECK_THROWS_AS(parser_helper("+0"), json::parse_error&); + + CHECK_THROWS_WITH_AS(parser_helper("01"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - unexpected number literal; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-01"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - unexpected number literal; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("--1"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '--'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '1.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E-"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected digit after exponent sign; last read: '1E-'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1.E1"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '1.E'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-1E"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '-1E'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0E#"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '-0E#'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0E-#"), + "[json.exception.parse_error.101] parse error at line 1, 
column 5: syntax error while parsing value - invalid number; expected digit after exponent sign; last read: '-0E-#'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0#"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: '-0#'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0.0:"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - unexpected ':'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0.0Z"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: '-0.0Z'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0E123:"), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - unexpected ':'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0e0-:"), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid number; expected digit after '-'; last read: '-:'; expected end of input", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0e-:"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid number; expected digit after exponent sign; last read: '-0e-:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0f"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: '-0f'; expected end of input", json::parse_error&); + } + } + } + + SECTION("accept") + { + SECTION("null") + { + CHECK(accept_helper("null")); + } + + SECTION("true") + { + CHECK(accept_helper("true")); + } + + SECTION("false") + { + CHECK(accept_helper("false")); + } + + SECTION("array") + { + SECTION("empty array") + { + CHECK(accept_helper("[]")); + CHECK(accept_helper("[ ]")); + } + + SECTION("nonempty array") + { + CHECK(accept_helper("[true, false, null]")); + } + } + + SECTION("object") + { + SECTION("empty object") + { + CHECK(accept_helper("{}")); + CHECK(accept_helper("{ }")); + } + + SECTION("nonempty object") + { + CHECK(accept_helper("{\"\": true, \"one\": 1, \"two\": null}")); + } + } + + SECTION("string") + { + // empty string + CHECK(accept_helper("\"\"")); + + SECTION("errors") + { + // error: tab in string + CHECK(accept_helper("\"\t\"") == false); + // error: newline in string + CHECK(accept_helper("\"\n\"") == false); + CHECK(accept_helper("\"\r\"") == false); + // error: backspace in string + CHECK(accept_helper("\"\b\"") == false); + // improve code coverage + CHECK(accept_helper("\uFF01") == false); + CHECK(accept_helper("[-4:1,]") == false); + // unescaped control characters + CHECK(accept_helper("\"\x00\"") == false); // NOLINT(bugprone-string-literal-with-embedded-nul) + CHECK(accept_helper("\"\x01\"") == false); + CHECK(accept_helper("\"\x02\"") == false); + CHECK(accept_helper("\"\x03\"") == false); + CHECK(accept_helper("\"\x04\"") == false); + CHECK(accept_helper("\"\x05\"") == false); + CHECK(accept_helper("\"\x06\"") == false); + CHECK(accept_helper("\"\x07\"") == false); + CHECK(accept_helper("\"\x08\"") == false); + CHECK(accept_helper("\"\x09\"") == false); + CHECK(accept_helper("\"\x0a\"") == false); + CHECK(accept_helper("\"\x0b\"") == false); + CHECK(accept_helper("\"\x0c\"") == 
false); + CHECK(accept_helper("\"\x0d\"") == false); + CHECK(accept_helper("\"\x0e\"") == false); + CHECK(accept_helper("\"\x0f\"") == false); + CHECK(accept_helper("\"\x10\"") == false); + CHECK(accept_helper("\"\x11\"") == false); + CHECK(accept_helper("\"\x12\"") == false); + CHECK(accept_helper("\"\x13\"") == false); + CHECK(accept_helper("\"\x14\"") == false); + CHECK(accept_helper("\"\x15\"") == false); + CHECK(accept_helper("\"\x16\"") == false); + CHECK(accept_helper("\"\x17\"") == false); + CHECK(accept_helper("\"\x18\"") == false); + CHECK(accept_helper("\"\x19\"") == false); + CHECK(accept_helper("\"\x1a\"") == false); + CHECK(accept_helper("\"\x1b\"") == false); + CHECK(accept_helper("\"\x1c\"") == false); + CHECK(accept_helper("\"\x1d\"") == false); + CHECK(accept_helper("\"\x1e\"") == false); + CHECK(accept_helper("\"\x1f\"") == false); + } + + SECTION("escaped") + { + // quotation mark "\"" + auto r1 = R"("\"")"_json; + CHECK(accept_helper("\"\\\"\"")); + // reverse solidus "\\" + auto r2 = R"("\\")"_json; + CHECK(accept_helper("\"\\\\\"")); + // solidus + CHECK(accept_helper("\"\\/\"")); + // backspace + CHECK(accept_helper("\"\\b\"")); + // formfeed + CHECK(accept_helper("\"\\f\"")); + // newline + CHECK(accept_helper("\"\\n\"")); + // carriage return + CHECK(accept_helper("\"\\r\"")); + // horizontal tab + CHECK(accept_helper("\"\\t\"")); + + CHECK(accept_helper("\"\\u0001\"")); + CHECK(accept_helper("\"\\u000a\"")); + CHECK(accept_helper("\"\\u00b0\"")); + CHECK(accept_helper("\"\\u0c00\"")); + CHECK(accept_helper("\"\\ud000\"")); + CHECK(accept_helper("\"\\u000E\"")); + CHECK(accept_helper("\"\\u00F0\"")); + CHECK(accept_helper("\"\\u0100\"")); + CHECK(accept_helper("\"\\u2000\"")); + CHECK(accept_helper("\"\\uFFFF\"")); + CHECK(accept_helper("\"\\u20AC\"")); + CHECK(accept_helper("\"€\"")); + CHECK(accept_helper("\"🎈\"")); + + CHECK(accept_helper("\"\\ud80c\\udc60\"")); + CHECK(accept_helper("\"\\ud83c\\udf1e\"")); + } + } + + SECTION("number") + { + SECTION("integers") + { + SECTION("without exponent") + { + CHECK(accept_helper("-128")); + CHECK(accept_helper("-0")); + CHECK(accept_helper("0")); + CHECK(accept_helper("128")); + } + + SECTION("with exponent") + { + CHECK(accept_helper("0e1")); + CHECK(accept_helper("0E1")); + + CHECK(accept_helper("10000E-4")); + CHECK(accept_helper("10000E-3")); + CHECK(accept_helper("10000E-2")); + CHECK(accept_helper("10000E-1")); + CHECK(accept_helper("10000E0")); + CHECK(accept_helper("10000E1")); + CHECK(accept_helper("10000E2")); + CHECK(accept_helper("10000E3")); + CHECK(accept_helper("10000E4")); + + CHECK(accept_helper("10000e-4")); + CHECK(accept_helper("10000e-3")); + CHECK(accept_helper("10000e-2")); + CHECK(accept_helper("10000e-1")); + CHECK(accept_helper("10000e0")); + CHECK(accept_helper("10000e1")); + CHECK(accept_helper("10000e2")); + CHECK(accept_helper("10000e3")); + CHECK(accept_helper("10000e4")); + + CHECK(accept_helper("-0e1")); + CHECK(accept_helper("-0E1")); + CHECK(accept_helper("-0E123")); + } + + SECTION("edge cases") + { + // From RFC8259, Section 6: + // Note that when such software is used, numbers that are + // integers and are in the range [-(2**53)+1, (2**53)-1] + // are interoperable in the sense that implementations will + // agree exactly on their numeric values. 
+ + // -(2**53)+1 + CHECK(accept_helper("-9007199254740991")); + // (2**53)-1 + CHECK(accept_helper("9007199254740991")); + } + + SECTION("over the edge cases") // issue #178 - Integer conversion to unsigned (incorrect handling of 64-bit integers) + { + // While RFC8259, Section 6 specifies a preference for support + // for ranges in range of IEEE 754-2008 binary64 (double precision) + // this does not accommodate 64 bit integers without loss of accuracy. + // As 64 bit integers are now widely used in software, it is desirable + // to expand support to the full 64 bit (signed and unsigned) range + // i.e. -(2**63) -> (2**64)-1. + + // -(2**63) ** Note: compilers see negative literals as negated positive numbers (hence the -1)) + CHECK(accept_helper("-9223372036854775808")); + // (2**63)-1 + CHECK(accept_helper("9223372036854775807")); + // (2**64)-1 + CHECK(accept_helper("18446744073709551615")); + } + } + + SECTION("floating-point") + { + SECTION("without exponent") + { + CHECK(accept_helper("-128.5")); + CHECK(accept_helper("0.999")); + CHECK(accept_helper("128.5")); + CHECK(accept_helper("-0.0")); + } + + SECTION("with exponent") + { + CHECK(accept_helper("-128.5E3")); + CHECK(accept_helper("-128.5E-3")); + CHECK(accept_helper("-0.0e1")); + CHECK(accept_helper("-0.0E1")); + } + } + + SECTION("overflow") + { + // overflows during parsing + CHECK(!accept_helper("1.18973e+4932")); + } + + SECTION("invalid numbers") + { + CHECK(accept_helper("01") == false); + CHECK(accept_helper("--1") == false); + CHECK(accept_helper("1.") == false); + CHECK(accept_helper("1E") == false); + CHECK(accept_helper("1E-") == false); + CHECK(accept_helper("1.E1") == false); + CHECK(accept_helper("-1E") == false); + CHECK(accept_helper("-0E#") == false); + CHECK(accept_helper("-0E-#") == false); + CHECK(accept_helper("-0#") == false); + CHECK(accept_helper("-0.0:") == false); + CHECK(accept_helper("-0.0Z") == false); + CHECK(accept_helper("-0E123:") == false); + CHECK(accept_helper("-0e0-:") == false); + CHECK(accept_helper("-0e-:") == false); + CHECK(accept_helper("-0f") == false); + + // numbers must not begin with "+" + CHECK(accept_helper("+1") == false); + CHECK(accept_helper("+0") == false); + } + } + } + + SECTION("parse errors") + { + // unexpected end of number + CHECK_THROWS_WITH_AS(parser_helper("0."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '0.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '-'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("--"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '--'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-0."), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid number; expected digit after '.'; last read: '-0.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-."), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid number; expected digit after '-'; last read: '-.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("-:"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax 
error while parsing value - invalid number; expected digit after '-'; last read: '-:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("0.:"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected digit after '.'; last read: '0.:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("e."), + "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - invalid literal; last read: 'e'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1e."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1e.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1e/"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1e/'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1e:"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1e:'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E."), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E.'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E/"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E/'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("1E:"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid number; expected '+', '-', or digit after exponent; last read: '1E:'", json::parse_error&); + + // unexpected end of null + CHECK_THROWS_WITH_AS(parser_helper("n"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid literal; last read: 'n'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nu"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: 'nu'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nul"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'nul'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nulk"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'nulk'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("nulm"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'nulm'", json::parse_error&); + + // unexpected end of true + CHECK_THROWS_WITH_AS(parser_helper("t"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid literal; last read: 't'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("tr"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: 'tr'", json::parse_error&); + 
CHECK_THROWS_WITH_AS(parser_helper("tru"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'tru'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("trud"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'trud'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("truf"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'truf'", json::parse_error&); + + // unexpected end of false + CHECK_THROWS_WITH_AS(parser_helper("f"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid literal; last read: 'f'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("fa"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid literal; last read: 'fa'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("fal"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid literal; last read: 'fal'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("fals"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: 'fals'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("falsd"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: 'falsd'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("falsf"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid literal; last read: 'falsf'", json::parse_error&); + + // missing/unexpected end of array + CHECK_THROWS_WITH_AS(parser_helper("["), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("[1"), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing array - unexpected end of input; expected ']'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("[1,"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("[1,]"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - unexpected ']'; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("]"), + "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - unexpected ']'; expected '[', '{', or a literal", json::parse_error&); + + // missing/unexpected end of object + CHECK_THROWS_WITH_AS(parser_helper("{"), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing object key - unexpected end of input; expected string literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("{\"foo\""), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing object separator - unexpected end of input; expected ':'", json::parse_error&); + 
CHECK_THROWS_WITH_AS(parser_helper("{\"foo\":"), + "[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - unexpected end of input; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("{\"foo\":}"), + "[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - unexpected '}'; expected '[', '{', or a literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("{\"foo\":1,}"), + "[json.exception.parse_error.101] parse error at line 1, column 10: syntax error while parsing object key - unexpected '}'; expected string literal", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("}"), + "[json.exception.parse_error.101] parse error at line 1, column 1: syntax error while parsing value - unexpected '}'; expected '[', '{', or a literal", json::parse_error&); + + // missing/unexpected end of string + CHECK_THROWS_WITH_AS(parser_helper("\""), + "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid string: missing closing quote; last read: '\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\\""), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: missing closing quote; last read: '\"\\\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u\""), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u0\""), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u0\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u01\""), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u01\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u012\""), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u012\"'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u"), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u0"), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u0'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u01"), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u01'", json::parse_error&); + CHECK_THROWS_WITH_AS(parser_helper("\"\\u012"), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '\"\\u012'", json::parse_error&); + + // invalid escapes + for (int c = 1; c < 128; ++c) + { + auto s = std::string("\"\\") + std::string(1, 
static_cast(c)) + "\""; + + switch (c) + { + // valid escapes + case ('"'): + case ('\\'): + case ('/'): + case ('b'): + case ('f'): + case ('n'): + case ('r'): + case ('t'): + { + CHECK_NOTHROW(parser_helper(s)); + break; + } + + // \u must be followed with four numbers, so we skip it here + case ('u'): + { + break; + } + + // any other combination of backslash and character is invalid + default: + { + CHECK_THROWS_AS(parser_helper(s), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s), + "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid string: forbidden character after backslash; last read: '\"\\" + std::string(1, static_cast(c)) + "'"); + } + break; + } + } + } + + // invalid \uxxxx escapes + { + // check whether character is a valid hex character + const auto valid = [](int c) + { + switch (c) + { + case ('0'): + case ('1'): + case ('2'): + case ('3'): + case ('4'): + case ('5'): + case ('6'): + case ('7'): + case ('8'): + case ('9'): + case ('a'): + case ('b'): + case ('c'): + case ('d'): + case ('e'): + case ('f'): + case ('A'): + case ('B'): + case ('C'): + case ('D'): + case ('E'): + case ('F'): + { + return true; + } + + default: + { + return false; + } + } + }; + + for (int c = 1; c < 128; ++c) + { + std::string const s = "\"\\u"; + + // create a string with the iterated character at each position + auto s1 = s + "000" + std::string(1, static_cast(c)) + "\""; + auto s2 = s + "00" + std::string(1, static_cast(c)) + "0\""; + auto s3 = s + "0" + std::string(1, static_cast(c)) + "00\""; + auto s4 = s + std::string(1, static_cast(c)) + "000\""; + + if (valid(c)) + { + CAPTURE(s1) + CHECK_NOTHROW(parser_helper(s1)); + CAPTURE(s2) + CHECK_NOTHROW(parser_helper(s2)); + CAPTURE(s3) + CHECK_NOTHROW(parser_helper(s3)); + CAPTURE(s4) + CHECK_NOTHROW(parser_helper(s4)); + } + else + { + CAPTURE(s1) + CHECK_THROWS_AS(parser_helper(s1), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s1), + "[json.exception.parse_error.101] parse error at line 1, column 7: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s1.substr(0, 7) + "'"); + } + + CAPTURE(s2) + CHECK_THROWS_AS(parser_helper(s2), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s2), + "[json.exception.parse_error.101] parse error at line 1, column 6: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s2.substr(0, 6) + "'"); + } + + CAPTURE(s3) + CHECK_THROWS_AS(parser_helper(s3), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s3), + "[json.exception.parse_error.101] parse error at line 1, column 5: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; last read: '" + s3.substr(0, 5) + "'"); + } + + CAPTURE(s4) + CHECK_THROWS_AS(parser_helper(s4), json::parse_error&); + // only check error message if c is not a control character + if (c > 0x1f) + { + CHECK_THROWS_WITH_STD_STR(parser_helper(s4), + "[json.exception.parse_error.101] parse error at line 1, column 4: syntax error while parsing value - invalid string: '\\u' must be followed by 4 hex digits; 
last read: '" + s4.substr(0, 4) + "'"); + } + } + } + } + + json _; + + // missing part of a surrogate pair + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\""), "[json.exception.parse_error.101] parse error at line 1, column 8: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\"'", json::parse_error&); + // invalid surrogate pair + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\\uD80C\""), + "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\uD80C'", json::parse_error&); + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\\u0000\""), + "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\u0000'", json::parse_error&); + CHECK_THROWS_WITH_AS(_ = json::parse("\"\\uD80C\\uFFFF\""), + "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing value - invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF; last read: '\"\\uD80C\\uFFFF'", json::parse_error&); + } + + SECTION("parse errors (accept)") + { + // unexpected end of number + CHECK(accept_helper("0.") == false); + CHECK(accept_helper("-") == false); + CHECK(accept_helper("--") == false); + CHECK(accept_helper("-0.") == false); + CHECK(accept_helper("-.") == false); + CHECK(accept_helper("-:") == false); + CHECK(accept_helper("0.:") == false); + CHECK(accept_helper("e.") == false); + CHECK(accept_helper("1e.") == false); + CHECK(accept_helper("1e/") == false); + CHECK(accept_helper("1e:") == false); + CHECK(accept_helper("1E.") == false); + CHECK(accept_helper("1E/") == false); + CHECK(accept_helper("1E:") == false); + + // unexpected end of null + CHECK(accept_helper("n") == false); + CHECK(accept_helper("nu") == false); + CHECK(accept_helper("nul") == false); + + // unexpected end of true + CHECK(accept_helper("t") == false); + CHECK(accept_helper("tr") == false); + CHECK(accept_helper("tru") == false); + + // unexpected end of false + CHECK(accept_helper("f") == false); + CHECK(accept_helper("fa") == false); + CHECK(accept_helper("fal") == false); + CHECK(accept_helper("fals") == false); + + // missing/unexpected end of array + CHECK(accept_helper("[") == false); + CHECK(accept_helper("[1") == false); + CHECK(accept_helper("[1,") == false); + CHECK(accept_helper("[1,]") == false); + CHECK(accept_helper("]") == false); + + // missing/unexpected end of object + CHECK(accept_helper("{") == false); + CHECK(accept_helper("{\"foo\"") == false); + CHECK(accept_helper("{\"foo\":") == false); + CHECK(accept_helper("{\"foo\":}") == false); + CHECK(accept_helper("{\"foo\":1,}") == false); + CHECK(accept_helper("}") == false); + + // missing/unexpected end of string + CHECK(accept_helper("\"") == false); + CHECK(accept_helper("\"\\\"") == false); + CHECK(accept_helper("\"\\u\"") == false); + CHECK(accept_helper("\"\\u0\"") == false); + CHECK(accept_helper("\"\\u01\"") == false); + CHECK(accept_helper("\"\\u012\"") == false); + CHECK(accept_helper("\"\\u") == false); + CHECK(accept_helper("\"\\u0") == false); + CHECK(accept_helper("\"\\u01") == false); + CHECK(accept_helper("\"\\u012") == false); + + // unget of newline + CHECK(parser_helper("\n123\n") == 123); + + // invalid escapes + for (int c = 1; c < 128; 
++c) + { + auto s = std::string("\"\\") + std::string(1, static_cast(c)) + "\""; + + switch (c) + { + // valid escapes + case ('"'): + case ('\\'): + case ('/'): + case ('b'): + case ('f'): + case ('n'): + case ('r'): + case ('t'): + { + CHECK(json::parser(nlohmann::detail::input_adapter(s)).accept()); + break; + } + + // \u must be followed with four numbers, so we skip it here + case ('u'): + { + break; + } + + // any other combination of backslash and character is invalid + default: + { + CHECK(json::parser(nlohmann::detail::input_adapter(s)).accept() == false); + break; + } + } + } + + // invalid \uxxxx escapes + { + // check whether character is a valid hex character + const auto valid = [](int c) + { + switch (c) + { + case ('0'): + case ('1'): + case ('2'): + case ('3'): + case ('4'): + case ('5'): + case ('6'): + case ('7'): + case ('8'): + case ('9'): + case ('a'): + case ('b'): + case ('c'): + case ('d'): + case ('e'): + case ('f'): + case ('A'): + case ('B'): + case ('C'): + case ('D'): + case ('E'): + case ('F'): + { + return true; + } + + default: + { + return false; + } + } + }; + + for (int c = 1; c < 128; ++c) + { + std::string const s = "\"\\u"; + + // create a string with the iterated character at each position + const auto s1 = s + "000" + std::string(1, static_cast(c)) + "\""; + const auto s2 = s + "00" + std::string(1, static_cast(c)) + "0\""; + const auto s3 = s + "0" + std::string(1, static_cast(c)) + "00\""; + const auto s4 = s + std::string(1, static_cast(c)) + "000\""; + + if (valid(c)) + { + CAPTURE(s1) + CHECK(json::parser(nlohmann::detail::input_adapter(s1)).accept()); + CAPTURE(s2) + CHECK(json::parser(nlohmann::detail::input_adapter(s2)).accept()); + CAPTURE(s3) + CHECK(json::parser(nlohmann::detail::input_adapter(s3)).accept()); + CAPTURE(s4) + CHECK(json::parser(nlohmann::detail::input_adapter(s4)).accept()); + } + else + { + CAPTURE(s1) + CHECK(json::parser(nlohmann::detail::input_adapter(s1)).accept() == false); + + CAPTURE(s2) + CHECK(json::parser(nlohmann::detail::input_adapter(s2)).accept() == false); + + CAPTURE(s3) + CHECK(json::parser(nlohmann::detail::input_adapter(s3)).accept() == false); + + CAPTURE(s4) + CHECK(json::parser(nlohmann::detail::input_adapter(s4)).accept() == false); + } + } + } + + // missing part of a surrogate pair + CHECK(accept_helper("\"\\uD80C\"") == false); + // invalid surrogate pair + CHECK(accept_helper("\"\\uD80C\\uD80C\"") == false); + CHECK(accept_helper("\"\\uD80C\\u0000\"") == false); + CHECK(accept_helper("\"\\uD80C\\uFFFF\"") == false); + } + + SECTION("tests found by mutate++") + { + // test case to make sure no comma precedes the first key + CHECK_THROWS_WITH_AS(parser_helper("{,\"key\": false}"), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing object key - unexpected ','; expected string literal", json::parse_error&); + // test case to make sure an object is properly closed + CHECK_THROWS_WITH_AS(parser_helper("[{\"key\": false true]"), "[json.exception.parse_error.101] parse error at line 1, column 19: syntax error while parsing object - unexpected true literal; expected '}'", json::parse_error&); + + // test case to make sure the callback is properly evaluated after reading a key + { + json::parser_callback_t const cb = [](int /*unused*/, json::parse_event_t event, json& /*unused*/) noexcept + { + return event != json::parse_event_t::key; + }; + + json x = json::parse("{\"key\": false}", cb); + CHECK(x == json::object()); + } + } + + SECTION("callback function") + { 
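+        // The subsections below parse small documents with a
+        // json::parser_callback_t that filters nothing, everything,
+        // individual values, or whole objects/events, and check the
+        // resulting DOM against the expected filtered document.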
+ const auto* s_object = R"( + { + "foo": 2, + "bar": { + "baz": 1 + } + } + )"; + + const auto* s_array = R"( + [1,2,[3,4,5],4,5] + )"; + + const auto* structured_array = R"( + [ + 1, + { + "foo": "bar" + }, + { + "qux": "baz" + } + ] + )"; + + SECTION("filter nothing") + { + json j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return true; + }); + + CHECK (j_object == json({{"foo", 2}, {"bar", {{"baz", 1}}}})); + + json j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return true; + }); + + CHECK (j_array == json({1, 2, {3, 4, 5}, 4, 5})); + } + + SECTION("filter everything") + { + json const j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return false; + }); + + // the top-level object will be discarded, leaving a null + CHECK (j_object.is_null()); + + json const j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t /*unused*/, const json& /*unused*/) noexcept + { + return false; + }); + + // the top-level array will be discarded, leaving a null + CHECK (j_array.is_null()); + } + + SECTION("filter specific element") + { + json j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t event, const json & j) noexcept + { + // filter all number(2) elements + return event != json::parse_event_t::value || j != json(2); + }); + + CHECK (j_object == json({{"bar", {{"baz", 1}}}})); + + json j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t event, const json & j) noexcept + { + return event != json::parse_event_t::value || j != json(2); + }); + + CHECK (j_array == json({1, {3, 4, 5}, 4, 5})); + } + + SECTION("filter object in array") + { + json j_filtered1 = json::parse(structured_array, [](int /*unused*/, json::parse_event_t e, const json & parsed) + { + return !(e == json::parse_event_t::object_end && parsed.contains("foo")); + }); + + // the specified object will be discarded, and removed. + CHECK (j_filtered1.size() == 2); + CHECK (j_filtered1 == json({1, {{"qux", "baz"}}})); + + json j_filtered2 = json::parse(structured_array, [](int /*unused*/, json::parse_event_t e, const json& /*parsed*/) noexcept + { + return e != json::parse_event_t::object_end; + }); + + // removed all objects in array. 
+ CHECK (j_filtered2.size() == 1); + CHECK (j_filtered2 == json({1})); + } + + SECTION("filter specific events") + { + SECTION("first closing event") + { + { + json j_object = json::parse(s_object, [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + static bool first = true; + if (e == json::parse_event_t::object_end && first) + { + first = false; + return false; + } + + return true; + }); + + // the first completed object will be discarded + CHECK (j_object == json({{"foo", 2}})); + } + + { + json j_array = json::parse(s_array, [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + static bool first = true; + if (e == json::parse_event_t::array_end && first) + { + first = false; + return false; + } + + return true; + }); + + // the first completed array will be discarded + CHECK (j_array == json({1, 2, 4, 5})); + } + } + } + + SECTION("special cases") + { + // the following test cases cover the situation in which an empty + // object and array is discarded only after the closing character + // has been read + + json j_empty_object = json::parse("{}", [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + return e != json::parse_event_t::object_end; + }); + CHECK(j_empty_object == json()); + + json j_empty_array = json::parse("[]", [](int /*unused*/, json::parse_event_t e, const json& /*unused*/) noexcept + { + return e != json::parse_event_t::array_end; + }); + CHECK(j_empty_array == json()); + } + } + + SECTION("constructing from contiguous containers") + { + SECTION("from std::vector") + { + std::vector v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from std::array") + { + std::array v { {'t', 'r', 'u', 'e'} }; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from array") + { + uint8_t v[] = {'t', 'r', 'u', 'e'}; // NOLINT(cppcoreguidelines-avoid-c-arrays,hicpp-avoid-c-arrays,modernize-avoid-c-arrays) + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from char literal") + { + CHECK(parser_helper("true") == json(true)); + } + + SECTION("from std::string") + { + std::string v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from std::initializer_list") + { + std::initializer_list const v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + + SECTION("from std::valarray") + { + std::valarray v = {'t', 'r', 'u', 'e'}; + json j; + json::parser(nlohmann::detail::input_adapter(std::begin(v), std::end(v))).parse(true, j); + CHECK(j == json(true)); + } + } + + SECTION("improve test coverage") + { + SECTION("parser with callback") + { + json::parser_callback_t const cb = [](int /*unused*/, json::parse_event_t /*unused*/, json& /*unused*/) noexcept + { + return true; + }; + + CHECK(json::parse("{\"foo\": true:", cb, false).is_discarded()); + + json _; + CHECK_THROWS_WITH_AS(_ = json::parse("{\"foo\": true:", cb), "[json.exception.parse_error.101] parse error at line 1, column 13: syntax error while parsing object - unexpected ':'; expected '}'", json::parse_error&); + + 
CHECK_THROWS_WITH_AS(_ = json::parse("1.18973e+4932", cb), "[json.exception.out_of_range.406] number overflow parsing '1.18973e+4932'", json::out_of_range&); + } + + SECTION("SAX parser") + { + SECTION("} without value") + { + SaxCountdown s(1); + CHECK(json::sax_parse("{}", &s) == false); + } + + SECTION("} with value") + { + SaxCountdown s(3); + CHECK(json::sax_parse("{\"k1\": true}", &s) == false); + } + + SECTION("second key") + { + SaxCountdown s(3); + CHECK(json::sax_parse("{\"k1\": true, \"k2\": false}", &s) == false); + } + + SECTION("] without value") + { + SaxCountdown s(1); + CHECK(json::sax_parse("[]", &s) == false); + } + + SECTION("] with value") + { + SaxCountdown s(2); + CHECK(json::sax_parse("[1]", &s) == false); + } + + SECTION("float") + { + SaxCountdown s(0); + CHECK(json::sax_parse("3.14", &s) == false); + } + + SECTION("false") + { + SaxCountdown s(0); + CHECK(json::sax_parse("false", &s) == false); + } + + SECTION("null") + { + SaxCountdown s(0); + CHECK(json::sax_parse("null", &s) == false); + } + + SECTION("true") + { + SaxCountdown s(0); + CHECK(json::sax_parse("true", &s) == false); + } + + SECTION("unsigned") + { + SaxCountdown s(0); + CHECK(json::sax_parse("12", &s) == false); + } + + SECTION("integer") + { + SaxCountdown s(0); + CHECK(json::sax_parse("-12", &s) == false); + } + + SECTION("string") + { + SaxCountdown s(0); + CHECK(json::sax_parse("\"foo\"", &s) == false); + } + } + } + + SECTION("error messages for comments") + { + json _; + CHECK_THROWS_WITH_AS(_ = json::parse("/a", nullptr, true, true), "[json.exception.parse_error.101] parse error at line 1, column 2: syntax error while parsing value - invalid comment; expecting '/' or '*' after '/'; last read: '/a'", json::parse_error); + CHECK_THROWS_WITH_AS(_ = json::parse("/*", nullptr, true, true), "[json.exception.parse_error.101] parse error at line 1, column 3: syntax error while parsing value - invalid comment; missing closing '*/'; last read: '/*'", json::parse_error); + } + } + + ```` + + +* + ```` + + ??? "Click to view reference" + + .. code-block:: + + `gitlab.com` + +{% endraw %} + +**Fallacies:** + +*None* diff --git a/docs/trustable/trudag/m2r2_test/dashboard.rst b/docs/trustable/trudag/m2r2_test/dashboard.rst new file mode 100644 index 0000000..4448872 --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/dashboard.rst @@ -0,0 +1,137 @@ + +Dashboard +========= + +Evidence Score Distribution +--------------------------- + +The distribution of scores for evidence nodes across the graph. + + +.. image:: figs/evidence_hist.svg + :target: figs/evidence_hist.svg + :alt: No Image + + +??? example "click to view figure as table" + +.. code-block:: + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |16| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |0| + +Expectations Score Distribution +------------------------------- + +The distribution of scores for expectations nodes across the graph. + + +.. 
image:: figs/expectations_hist.svg + :target: figs/expectations_hist.svg + :alt: No Image + + +??? example "click to view figure as table" + +.. code-block:: + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |1| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |0| + +All Score Distribution +---------------------- + +The distribution of scores for all nodes across the graph. + + +.. image:: figs/all_hist.svg + :target: figs/all_hist.svg + :alt: No Image + + +??? example "click to view figure as table" + +.. code-block:: + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |24| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |0| + +Lennart SME Score Distribution +------------------------------ + + +.. image:: figs/Lennart_hist.svg + :target: figs/Lennart_hist.svg + :alt: No Image + + +??? example "click to view figure as table" + +.. code-block:: + + |bin|count| + |-|-| + |0.0-0.1 {style="background-color:hsl(12.0, 100%, 61%)"} |0| + |0.1-0.2 {style="background-color:hsl(24.0, 100%, 58%)"} |0| + |0.2-0.3 {style="background-color:hsl(36.0, 100%, 54%)"} |0| + |0.3-0.4 {style="background-color:hsl(48.0, 100%, 51%)"} |0| + |0.4-0.5 {style="background-color:hsl(60.0, 100%, 47%)"} |0| + |0.5-0.6 {style="background-color:hsl(72.0, 100%, 44%)"} |0| + |0.6-0.7 {style="background-color:hsl(84.0, 100%, 40%)"} |0| + |0.7-0.8 {style="background-color:hsl(96.0, 100%, 37%)"} |0| + |0.8-0.9 {style="background-color:hsl(108.0, 100%, 33%)"} |0| + |0.9-1.0 {style="background-color:hsl(120.0, 100%, 30%)"} |1| + +Summary +------- + +.. 
list-table:: + :header-rows: 1 + + * - Category + - Count + * - statements + - 24 + * - reviewed statements + - 23 + * - unreviewed statements + - 1 + * - orphaned statements + - 0 + * - statements with evidence + - 7 + * - evidence + - 16 + * - expectations + - 1 + diff --git a/docs/trustable/trudag/m2r2_test/nav.rst b/docs/trustable/trudag/m2r2_test/nav.rst new file mode 100644 index 0000000..b072595 --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/nav.rst @@ -0,0 +1,11 @@ +nav +=== + + + +* `Compliance report `_ +* `Dashboard `_ +* `TA `_ +* `TRUSTABLE `_ +* `TT `_ +* `WFJ `_ diff --git a/docs/trustable/trudag/m2r2_test/processing.py b/docs/trustable/trudag/m2r2_test/processing.py new file mode 100644 index 0000000..f5bfa7f --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/processing.py @@ -0,0 +1,279 @@ +import os +import re +import subprocess +import shutil + +# Directory containing the .rst files (directory of the script itself) +DIRECTORY = os.path.dirname(os.path.abspath(__file__)) # Path to the folder containing the Python file + + +def convert_and_move_md_files(input_dir, output_dir): + """ + Convert all .md files in the input directory to .rst files using m2r2 + and move them to the output directory. + + Args: + input_dir (str): Directory containing the .md files. + output_dir (str): Directory to move the converted .rst files to. + """ + # Ensure the output directory exists + os.makedirs(output_dir, exist_ok=True) + + # Iterate over all .md files in the input directory + for filename in os.listdir(input_dir): + if filename.endswith(".md"): # Process only .md files + md_file_path = os.path.join(input_dir, filename) + + # Call m2r2 to convert the file + print(f"Converting: {md_file_path}") + subprocess.run(["m2r2", md_file_path], check=True) + + # Determine the base name (without extension) + basename = os.path.splitext(filename)[0] + rst_file_name = f"{basename}.rst" + rst_file_path = os.path.join(input_dir, rst_file_name) + + # Move the .rst file to the output directory + if os.path.exists(rst_file_path): + shutil.move(rst_file_path, os.path.join(output_dir, rst_file_name)) + print(f"Converted and moved: {md_file_path} -> {os.path.join(output_dir, rst_file_name)}") + else: + print(f"Error: Expected .rst file not found for {md_file_path}") + +# Function to clean ".item-element" lines and remove content between {...} +def clean_item_element_references(directory): + print("Cleaning .item-element references...") + for filename in os.listdir(directory): + if filename.endswith(".rst"): # Process only .rst files + file_path = os.path.join(directory, filename) + print(f"Processing file: {file_path}") + + # Read the file content + with open(file_path, "r") as file: + lines = file.readlines() + + # Process each line and remove content between {...} for ".item-element" lines + processed_lines = [] + for line in lines: + if ".item-element" in line: + line = re.sub(r"{.*?}", "", line) # Remove content between { and } + processed_lines.append(line) + + # Write the cleaned lines back to the file + with open(file_path, "w") as file: + file.writelines(processed_lines) + print(f"File cleaned and saved: {file_path}") + + +def add_sections_and_headers(directory): + print("Adding sections and rearranging headers...") + + for filename in os.listdir(directory): + if filename.endswith(".rst"): # Process only .rst files + file_path = os.path.join(directory, filename) + print(f"Processing file: {file_path}") + + # Read the file content + with open(file_path, "r") as file: + lines = file.readlines() 
+ + # Collect all existing section references (e.g., .. _ta-analysis:) + existing_references = set() + for line in lines: + match = re.match(r"^\.\.\s*_(.+?):\s*$", line.strip()) + if match: + existing_references.add(match.group(1).strip().lower()) + + # Process the file line by line + processed_lines = [] + i = 0 + while i < len(lines): + line = lines[i] + + # Match lines containing uppercase words with optional ### and symbols like _ + # E.g., TA-ANALYSIS ###, TT-CHANGES ###, TA-SUPPLY_CHAIN ###, etc. + match = re.match(r"^([A-Z0-9\-_]+)(\s*###)?$", line.strip()) + if match and i + 1 < len(lines): # Verify the next line exists for the ^^^ line + next_line = lines[i + 1].strip() + + # Check if the next line is all ^^^ (or longer) + if re.match(r"^\^{5,}$", next_line): # Line with `^^^^` or longer + section_name = match.group(1).strip() + section_reference = section_name.lower() + + # Only add a new reference if it doesn't already exist + if section_reference not in existing_references: + # Add two blank lines and a section declaration above the line + processed_lines.append("\n\n") # Two blank lines + processed_lines.append(f".. _{section_reference}:\n") # Add section reference + processed_lines.append("\n") # Additional blank line + + # Add the original title without the ### + processed_lines.append(f"{section_name}\n") + processed_lines.append(next_line + "\n") # Add the separator line + + # Skip to the line after the ^^^ line + i += 2 + continue # Skip further processing of this section + + # Append unmodified lines if no match + processed_lines.append(line) + i += 1 + + # Write the updated content back to the file + with open(file_path, "w") as file: + file.writelines(processed_lines) + print(f"Sections and headers updated: {file_path}") + + +# Function to replace markdown-style references with :ref: (remove full links) +def replace_markdown_references(directory): + print("Replacing markdown-style references with :ref:...") + for filename in os.listdir(directory): + if filename.endswith(".rst"): # Process only .rst files + file_path = os.path.join(directory, filename) + print(f"Processing file: {file_path}") + + # Read the file content + with open(file_path, "r") as file: + lines = file.readlines() + + # Replace all markdown-structured references with proper Sphinx :ref: format + processed_lines = [] + for line in lines: + # Match lines like `TA-ANALYSIS `_ + line = re.sub( + r"`.*?<.*?#([\w\-_.]+)>`_", # Match the structure, including backticks and trailing _ + r":ref:`\1`", # Replace it with the Sphinx :ref: format + line + ) + processed_lines.append(line) + + # Write the updated lines back to the file + with open(file_path, "w") as file: + file.writelines(processed_lines) + print(f"Markdown references replaced in: {file_path}") + + +def rewrite_trudag_report(nav_file_path, trudag_report_path): + print(f"Rewriting file: {trudag_report_path} based on: {nav_file_path}") + + # Read the content of nav.rst + try: + with open(nav_file_path, "r") as nav_file: + nav_lines = nav_file.readlines() + except FileNotFoundError: + print(f"Error: {nav_file_path} not found.") + return + + # Extract references from nav.rst and convert them to Sphinx-compatible paths + new_toc_entries = [] + for line in nav_lines: + # Match lines like `* `Compliance report `_` + match = re.match(r"\*\s*`.+? 
<(.+?)\.md>`_", line.strip()) + if match: + # Extract the file name without `.md` and convert to the Sphinx path + reference = match.group(1) + sphinx_path = f" trudag/m2r2_test/{reference}" + new_toc_entries.append(sphinx_path) + if not new_toc_entries: + print("No valid entries found in nav.rst.") + return + + # Construct the content of the new trudag_report.rst file + new_content = """.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +.. _trudag_report: + +Trudag Report +================= + +.. toctree:: + :maxdepth: 2 + :caption: Trudag Report + :glob: + +""" + new_content += "\n".join(new_toc_entries) + "\n" + + # Write the new content to trudag_report.rst + try: + with open(trudag_report_path, "w") as trudag_report_file: + trudag_report_file.write(new_content) + print(f"File {trudag_report_path} has been successfully rewritten.") + except Exception as e: + print(f"Error writing to file {trudag_report_path}: {e}") + +def add_missing_headers(directory): + print("Adding headers to .rst files where missing...") + + # Process each file in the directory + for filename in os.listdir(directory): + if filename.endswith(".rst"): # Only process .rst files + file_path = os.path.join(directory, filename) + print(f"Processing file: {file_path}") + + # Read the file content + with open(file_path, "r") as file: + lines = file.readlines() + + # Check if the file already has a header with `=` underline + has_header = False + for i in range(len(lines) - 1): # Iterate through lines and check pairs + current_line = lines[i].strip() + next_line = lines[i + 1].strip() + + # Check if the current line is text and the next line is `=` with the same length + if current_line and next_line == "=" * len(current_line): + has_header = True + break + + # Skip the file if it already has a header + if has_header: + print(f"Header already exists in: {filename}") + continue + + # Generate the header from the filename (strip the .rst extension) + document_name = os.path.splitext(filename)[0] + header = f"{document_name}\n{'=' * len(document_name)}\n\n" + + # Add the header at the top of the file + updated_lines = [header] + lines + + # Write the updated content back to the file + with open(file_path, "w") as file: + file.writelines(updated_lines) + print(f"Header added to: {filename}") + + +# Run all functions +if __name__ == "__main__": + # Convert .md files to .rst and move them + convert_and_move_md_files(os.path.join(DIRECTORY, ".."), DIRECTORY) + + # Clean ".item-element" references + clean_item_element_references(DIRECTORY) + + # Add sections and headers + add_sections_and_headers(DIRECTORY) + + # Replace markdown-style references with :ref: + replace_markdown_references(DIRECTORY) + + # Update trudag_report.rst based on nav.rst + nav_file_path = rewrite_trudag_report(DIRECTORY + "/nav.rst", "/workspaces/inc_json/docs/trustable/trudag_report.rst") + + #Add missing headers to .rst files + add_missing_headers(DIRECTORY) diff --git 
a/docs/trustable/trudag/m2r2_test/trustable_report_for_json_library.rst b/docs/trustable/trudag/m2r2_test/trustable_report_for_json_library.rst new file mode 100644 index 0000000..400eb47 --- /dev/null +++ b/docs/trustable/trudag/m2r2_test/trustable_report_for_json_library.rst @@ -0,0 +1,153 @@ + +Trustable Compliance Report +=========================== + +Item status guide ## { .subsection } +------------------------------------ + +Each item in a Trustable Graph is scored with a number between 0 and 1. +The score represents aggregated organizational confidence in a given Statement, with larger numbers corresponding to higher confidence. +Scores in the report are indicated by both a numerical score and the colormap below: + + +.. raw:: html + +
+ 1.00  +  0.00 +
+ + +The status of an item and its links also affect the score. + +Unreviewed items are indicated by a strikethrough. +The score of unreviewed items is always set to zero. + +Suspect links are indicated by italics. +The contribution to the score of a parent item by a suspiciously linked child is always zero, regardless of the child's own score. + +Compliance for TA +----------------- + +.. list-table:: + :header-rows: 1 + + * - Item + - Summary + - Score + * - :ref:`ta-analysis` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Collected data from tests and monitoring of deployed software is analysed according to specified objectives. + - 0.00 + * - :ref:`ta-behaviours` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Expected or required behaviours for JSON-Library are identified, specified, verified and validated based on analysis. + - 0.00 + * - :ref:`ta-confidence` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Confidence in JSON-Library is measured based on results of analysis. + - 0.00 + * - :ref:`ta-constraints` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Constraints on adaptation and deployment of JSON-Library are specified. + - 0.00 + * - :ref:`ta-data` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Data is collected from tests, and from monitoring of deployed software, according to specified objectives. + - 0.00 + * - :ref:`ta-fixes` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Known bugs or misbehaviours are analysed and triaged, and critical fixes or mitigations are implemented or applied. + - 0.00 + * - :ref:`ta-indicators` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Advance warning indicators for misbehaviours are identified, and monitoring mechanisms are specified, verified and validated based on analysis. + - 0.00 + * - :ref:`ta-inputs` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - All inputs to JSON-Library are assessed, to identify potential risks and issues. + - 0.00 + * - :ref:`ta-iterations` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - All constructed iterations of JSON-Library include source code, build instructions, tests, results and attestations. + - 0.00 + * - :ref:`ta-methodologies` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Manual methodologies applied for JSON-Library by contributors, and their results, are managed according to specified objectives. + - 0.00 + * - :ref:`ta-misbehaviours` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Prohibited misbehaviours for JSON-Library are identified, and mitigations are specified, verified and validated based on analysis. + - 0.00 + * - :ref:`ta-releases` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Construction of JSON-Library releases is fully repeatable and the results are fully reproducible, with any exceptions documented and justified. + - 0.00 + * - :ref:`ta-supply_chain` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - All sources for JSON-Library and tools are mirrored in our controlled environment. + - 0.00 + * - :ref:`ta-tests` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - All tests for JSON-Library, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. 
+ - 0.00 + * - :ref:`ta-updates` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - JSON-Library components, configurations and tools are updated under specified change and configuration management controls. + - 0.00 + * - :ref:`ta-validation` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - All specified tests are executed repeatedly, under defined conditions in controlled environments, according to specified objectives. + - 0.00 + + +Compliance for TRUSTABLE +------------------------ + +.. list-table:: + :header-rows: 1 + + * - Item + - Summary + - Score + * - :ref:`trustable-software` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - This release of JSON-Library is Trustable. + - 0.00 + + +Compliance for TT +----------------- + +.. list-table:: + :header-rows: 1 + + * - Item + - Summary + - Score + * - :ref:`tt-changes` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - JSON-Library is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. + - 0.00 + * - :ref:`tt-confidence` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Confidence in JSON-Library is achieved by measuring and analysing behaviour and evidence over time. + - 0.00 + * - :ref:`tt-construction` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Tools are provided to build JSON-Library from trusted sources (also provided) with full reproducibility. + - 0.00 + * - :ref:`tt-expectations` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Documentation is provided, specifying what JSON-Library is expected to do, and what it must not do, and how this is verified. + - 0.00 + * - :ref:`tt-provenance` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - All inputs (and attestations for claims) for JSON-Library are provided with known provenance. + - 0.00 + * - :ref:`tt-results` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"} + - Evidence is provided to demonstrate that JSON-Library does what it is supposed to do, and does not do what it must not do. + - 0.00 + + +Compliance for WFJ +------------------ + +.. list-table:: + :header-rows: 1 + + * - Item + - Summary + - Score + * - :ref:`wfj-01` {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)" .status-unreviewed} + - The service checks for the four primitive types (strings, numbers, booleans, null). + - 0.00 + + +---- + +_Generated for: json\ *library* + + +* _Repository root: /home/d93609/projects/inc\ *json* +* *Commit SHA: d7cdd5c* +* *Commit date/time: Fri Jul 4 06:58:09 2025* +* *Commit tag: d7cdd5cb13ee04aeb1dbdbde7956cc09e318d100* diff --git a/docs/trustable/trudag/nav.md b/docs/trustable/trudag/nav.md new file mode 100644 index 0000000..98c593b --- /dev/null +++ b/docs/trustable/trudag/nav.md @@ -0,0 +1,6 @@ +- [Compliance report](trustable_report_for_json_library.md) +- [Dashboard](dashboard.md) +* [TA](TA.md) +* [TRUSTABLE](TRUSTABLE.md) +* [TT](TT.md) +* [WFJ](WFJ.md) diff --git a/docs/trustable/trudag/trustable_report_for_json_library.md b/docs/trustable/trudag/trustable_report_for_json_library.md new file mode 100644 index 0000000..2e21ca7 --- /dev/null +++ b/docs/trustable/trudag/trustable_report_for_json_library.md @@ -0,0 +1,76 @@ +# Trustable Compliance Report + + + +## Item status guide ## { .subsection } + +Each item in a Trustable Graph is scored with a number between 0 and 1. 
+The score represents aggregated organizational confidence in a given Statement, with larger numbers corresponding to higher confidence. +Scores in the report are indicated by both a numerical score and the colormap below: +
+1.00  + 0.00 +
+ + +The status of an item and its links also affect the score. + +Unreviewed items are indicated by a strikethrough. +The score of unreviewed items is always set to zero. + + +Suspect links are indicated by italics. +The contribution to the score of a parent item by a suspiciously linked child is always zero, regardless of the child's own score. +## Compliance for TA + +| Item | Summary | Score | +|--------|---------|-------| +| [TA-ANALYSIS](TA.md#ta-analysis) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Collected data from tests and monitoring of deployed software is analysed according to specified objectives. | 0.00 | +| [TA-BEHAVIOURS](TA.md#ta-behaviours) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Expected or required behaviours for JSON-Library are identified, specified, verified and validated based on analysis. | 0.00 | +| [TA-CONFIDENCE](TA.md#ta-confidence) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Confidence in JSON-Library is measured based on results of analysis. | 0.00 | +| [TA-CONSTRAINTS](TA.md#ta-constraints) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Constraints on adaptation and deployment of JSON-Library are specified. | 0.00 | +| [TA-DATA](TA.md#ta-data) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Data is collected from tests, and from monitoring of deployed software, according to specified objectives. | 0.00 | +| [TA-FIXES](TA.md#ta-fixes) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Known bugs or misbehaviours are analysed and triaged, and critical fixes or mitigations are implemented or applied. | 0.00 | +| [TA-INDICATORS](TA.md#ta-indicators) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Advance warning indicators for misbehaviours are identified, and monitoring mechanisms are specified, verified and validated based on analysis. | 0.00 | +| [TA-INPUTS](TA.md#ta-inputs) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| All inputs to JSON-Library are assessed, to identify potential risks and issues. | 0.00 | +| [TA-ITERATIONS](TA.md#ta-iterations) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| All constructed iterations of JSON-Library include source code, build instructions, tests, results and attestations. | 0.00 | +| [TA-METHODOLOGIES](TA.md#ta-methodologies) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Manual methodologies applied for JSON-Library by contributors, and their results, are managed according to specified objectives. | 0.00 | +| [TA-MISBEHAVIOURS](TA.md#ta-misbehaviours) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Prohibited misbehaviours for JSON-Library are identified, and mitigations are specified, verified and validated based on analysis. | 0.00 | +| [TA-RELEASES](TA.md#ta-releases) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Construction of JSON-Library releases is fully repeatable and the results are fully reproducible, with any exceptions documented and justified. | 0.00 | +| [TA-SUPPLY_CHAIN](TA.md#ta-supply_chain) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| All sources for JSON-Library and tools are mirrored in our controlled environment. 
| 0.00 | +| [TA-TESTS](TA.md#ta-tests) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| All tests for JSON-Library, and its build and test environments, are constructed from controlled/mirrored sources and are reproducible, with any exceptions documented. | 0.00 | +| [TA-UPDATES](TA.md#ta-updates) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| JSON-Library components, configurations and tools are updated under specified change and configuration management controls. | 0.00 | +| [TA-VALIDATION](TA.md#ta-validation) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| All specified tests are executed repeatedly, under defined conditions in controlled environments, according to specified objectives. | 0.00 | + +## Compliance for TRUSTABLE + +| Item | Summary | Score | +|--------|---------|-------| +| [TRUSTABLE-SOFTWARE](TRUSTABLE.md#trustable-software) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| This release of JSON-Library is Trustable. | 0.00 | + +## Compliance for TT + +| Item | Summary | Score | +|--------|---------|-------| +| [TT-CHANGES](TT.md#tt-changes) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| JSON-Library is actively maintained, with regular updates to dependencies, and changes are verified to prevent regressions. | 0.00 | +| [TT-CONFIDENCE](TT.md#tt-confidence) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Confidence in JSON-Library is achieved by measuring and analysing behaviour and evidence over time. | 0.00 | +| [TT-CONSTRUCTION](TT.md#tt-construction) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Tools are provided to build JSON-Library from trusted sources (also provided) with full reproducibility. | 0.00 | +| [TT-EXPECTATIONS](TT.md#tt-expectations) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Documentation is provided, specifying what JSON-Library is expected to do, and what it must not do, and how this is verified. | 0.00 | +| [TT-PROVENANCE](TT.md#tt-provenance) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| All inputs (and attestations for claims) for JSON-Library are provided with known provenance. | 0.00 | +| [TT-RESULTS](TT.md#tt-results) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)"}| Evidence is provided to demonstrate that JSON-Library does what it is supposed to do, and does not do what it must not do. | 0.00 | + +## Compliance for WFJ + +| Item | Summary | Score | +|--------|---------|-------| +| [WFJ-01](WFJ.md#wfj-01) {class="tsf-score" style="background-color:hsl(0.0, 100%, 65%)" .status-unreviewed}| The service checks for the four primitive types (strings, numbers, booleans, null). | 0.00 | + + +--- + +_Generated for: json_library_ + +* _Repository root: /home/d93609/projects/inc_json_ +* _Commit SHA: d7cdd5c_ +* _Commit date/time: Fri Jul 4 06:58:09 2025_ +* _Commit tag: d7cdd5cb13ee04aeb1dbdbde7956cc09e318d100_ diff --git a/docs/trustable/trudag_report.rst b/docs/trustable/trudag_report.rst new file mode 100644 index 0000000..e51211f --- /dev/null +++ b/docs/trustable/trudag_report.rst @@ -0,0 +1,30 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. 
+ # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +.. _trudag_report: + +Trudag Report +================= + +.. toctree:: + :maxdepth: 2 + :caption: Trudag Report + :glob: + + trudag/m2r2_test/trustable_report_for_json_library + trudag/m2r2_test/dashboard + trudag/m2r2_test/TA + trudag/m2r2_test/TRUSTABLE + trudag/m2r2_test/TT + trudag/m2r2_test/WFJ From 0149fcefabd06f9018ffff3f0d395cbd7c34370c Mon Sep 17 00:00:00 2001 From: Lennart Becker Date: Tue, 8 Jul 2025 05:37:47 +0000 Subject: [PATCH 10/10] format: processing.py -Implements suggested formatting from Ruff --- docs/trustable/trudag/m2r2_test/processing.py | 41 ++++++++++--------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/docs/trustable/trudag/m2r2_test/processing.py b/docs/trustable/trudag/m2r2_test/processing.py index f5bfa7f..81760d5 100644 --- a/docs/trustable/trudag/m2r2_test/processing.py +++ b/docs/trustable/trudag/m2r2_test/processing.py @@ -3,19 +3,16 @@ import subprocess import shutil -# Directory containing the .rst files (directory of the script itself) -DIRECTORY = os.path.dirname(os.path.abspath(__file__)) # Path to the folder containing the Python file +# Path to the folder containing the Python file +DIRECTORY = os.path.dirname(os.path.abspath(__file__)) +# Convert all .md files in the input directory to .rst files using m2r2 +# and move them to the output directory. +# Args: +# input_dir (str): Directory containing the .md files. +# output_dir (str): Directory to move the converted .rst files to. def convert_and_move_md_files(input_dir, output_dir): - """ - Convert all .md files in the input directory to .rst files using m2r2 - and move them to the output directory. - - Args: - input_dir (str): Directory containing the .md files. - output_dir (str): Directory to move the converted .rst files to. - """ # Ensure the output directory exists os.makedirs(output_dir, exist_ok=True) @@ -36,10 +33,13 @@ def convert_and_move_md_files(input_dir, output_dir): # Move the .rst file to the output directory if os.path.exists(rst_file_path): shutil.move(rst_file_path, os.path.join(output_dir, rst_file_name)) - print(f"Converted and moved: {md_file_path} -> {os.path.join(output_dir, rst_file_name)}") + print( + f"Converted and moved: {md_file_path} -> {os.path.join(output_dir, rst_file_name)}" + ) else: print(f"Error: Expected .rst file not found for {md_file_path}") + # Function to clean ".item-element" lines and remove content between {...} def clean_item_element_references(directory): print("Cleaning .item-element references...") @@ -67,7 +67,6 @@ def clean_item_element_references(directory): def add_sections_and_headers(directory): print("Adding sections and rearranging headers...") - for filename in os.listdir(directory): if filename.endswith(".rst"): # Process only .rst files file_path = os.path.join(directory, filename) @@ -93,7 +92,8 @@ def add_sections_and_headers(directory): # Match lines containing uppercase words with optional ### and symbols like _ # E.g., TA-ANALYSIS ###, TT-CHANGES ###, TA-SUPPLY_CHAIN ###, etc. 
match = re.match(r"^([A-Z0-9\-_]+)(\s*###)?$", line.strip()) - if match and i + 1 < len(lines): # Verify the next line exists for the ^^^ line + # Verify the next line exists for the ^^^ line + if match and i + 1 < len(lines): next_line = lines[i + 1].strip() # Check if the next line is all ^^^ (or longer) @@ -105,12 +105,12 @@ def add_sections_and_headers(directory): if section_reference not in existing_references: # Add two blank lines and a section declaration above the line processed_lines.append("\n\n") # Two blank lines - processed_lines.append(f".. _{section_reference}:\n") # Add section reference + processed_lines.append(f".. _{section_reference}:\n") processed_lines.append("\n") # Additional blank line # Add the original title without the ### processed_lines.append(f"{section_name}\n") - processed_lines.append(next_line + "\n") # Add the separator line + processed_lines.append(next_line + "\n") # Skip to the line after the ^^^ line i += 2 @@ -145,7 +145,7 @@ def replace_markdown_references(directory): line = re.sub( r"`.*?<.*?#([\w\-_.]+)>`_", # Match the structure, including backticks and trailing _ r":ref:`\1`", # Replace it with the Sphinx :ref: format - line + line, ) processed_lines.append(line) @@ -157,7 +157,6 @@ def replace_markdown_references(directory): def rewrite_trudag_report(nav_file_path, trudag_report_path): print(f"Rewriting file: {trudag_report_path} based on: {nav_file_path}") - # Read the content of nav.rst try: with open(nav_file_path, "r") as nav_file: @@ -216,9 +215,9 @@ def rewrite_trudag_report(nav_file_path, trudag_report_path): except Exception as e: print(f"Error writing to file {trudag_report_path}: {e}") + def add_missing_headers(directory): print("Adding headers to .rst files where missing...") - # Process each file in the directory for filename in os.listdir(directory): if filename.endswith(".rst"): # Only process .rst files @@ -273,7 +272,9 @@ def add_missing_headers(directory): replace_markdown_references(DIRECTORY) # Update trudag_report.rst based on nav.rst - nav_file_path = rewrite_trudag_report(DIRECTORY + "/nav.rst", "/workspaces/inc_json/docs/trustable/trudag_report.rst") + nav_file_path = rewrite_trudag_report( + DIRECTORY + "/nav.rst", "/workspaces/inc_json/docs/trustable/trudag_report.rst" + ) - #Add missing headers to .rst files + # Add missing headers to .rst files add_missing_headers(DIRECTORY)
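
For readers who want to reproduce the Markdown-to-reST conversion that ``processing.py`` drives, the sketch below mirrors the ``subprocess`` call made in ``convert_and_move_md_files()``. It is a minimal standalone example, not part of the patch itself, assuming the ``m2r2`` tool is installed and on ``PATH``; the input path is illustrative only.

.. code-block:: python

    import subprocess
    from pathlib import Path

    # Illustrative input path; any generated trudag Markdown report works here.
    md_file = Path("docs/trustable/trudag/dashboard.md")

    # m2r2 converts dashboard.md to dashboard.rst alongside the input file,
    # which is exactly what convert_and_move_md_files() relies on before
    # moving the result into the m2r2_test directory.
    subprocess.run(["m2r2", str(md_file)], check=True)

    rst_file = md_file.with_suffix(".rst")
    print(f"Converted: {md_file} -> {rst_file}")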