mirror of https://github.com/gchq/CyberChef synced 2025-12-05 23:53:27 +00:00

Compare commits

...

135 Commits

Author SHA1 Message Date
n1474335
a5ea7f7d58 8.34.0 2019-06-28 15:03:55 +01:00
n1474335
d637ac7633 Updated CHANGELOG 2019-06-28 15:03:49 +01:00
n1474335
c1ad2386ef Improvements to Entropy operation. Converted to ArrayBuffers, improved efficiency with large files, added present method back in. 2019-06-28 15:00:19 +01:00
mshwed
b0b6de116d Fixed linting issue 2019-06-27 14:11:26 -04:00
mshwed
1b161f997b Refactored advanced entropy operation into entropy operation 2019-06-27 14:09:41 -04:00
mshwed
b99af58636 Merge branch 'master' of https://github.com/gchq/CyberChef into feature/advanced-entropy 2019-06-27 13:15:19 -04:00
n1474335
d4edbb3c3e 8.33.1 2019-06-27 17:30:43 +01:00
n1474335
1b765605ca Updated dependencies 2019-06-27 17:29:27 +01:00
n1474335
8286dc26ad Updated patch dependencies 2019-06-27 17:06:07 +01:00
n1474335
19a438c15b Updated Bootstrap dependecy 2019-06-27 16:58:26 +01:00
n1474335
12898a1a8e Merge branch 'artemisbot-features/bz2-comp' 2019-06-27 16:48:15 +01:00
n1474335
541f2a2988 8.33.0 2019-06-27 16:47:32 +01:00
n1474335
d0277dde3f Updated CHANGELOG 2019-06-27 16:47:26 +01:00
n1474335
d184e40116 Added min and max for Bzip2 block size arg 2019-06-27 16:45:16 +01:00
n1474335
596db07647 Merge branch 'features/bz2-comp' of https://github.com/artemisbot/CyberChef into artemisbot-features/bz2-comp 2019-06-27 16:44:42 +01:00
n1474335
c233c5c67e Modified operation request template description 2019-06-27 16:01:44 +01:00
n1474335
db788b57e7 Merge branch 'update-issue-templates' of https://github.com/h345983745/CyberChef into h345983745-update-issue-templates 2019-06-27 15:58:38 +01:00
n1474335
c0983654d8 8.32.1 2019-06-27 15:56:16 +01:00
n1474335
30bf95f2c1 Merge branch 'oct' of https://github.com/wh0/CyberChef into wh0-oct 2019-06-27 15:55:34 +01:00
n1474335
da178107f9 8.32.0 2019-06-27 15:50:30 +01:00
n1474335
4dda1d9e49 Updated CHANGELOG 2019-06-27 15:45:17 +01:00
n1474335
e11aec64cd Modified wording for IC op 2019-06-27 15:42:32 +01:00
n1474335
71575e49d7 Merge branch 'index-of-coincidence' of https://github.com/Ge0rg3/CyberChef into Ge0rg3-index-of-coincidence 2019-06-27 15:41:20 +01:00
n1474335
393d070b05 8.31.8 2019-06-27 15:37:12 +01:00
n1474335
d7e4c28cd0 Merge branch 'bugfix/json-to-csv' of https://github.com/MShwed/CyberChef into MShwed-bugfix/json-to-csv 2019-06-27 15:36:12 +01:00
n1474335
ccf2348cd6 8.31.7 2019-06-27 15:34:22 +01:00
n1474335
cde3eb2c39 Merge branch 'popover-patch' of https://github.com/Ge0rg3/CyberChef into Ge0rg3-popover-patch 2019-06-27 15:31:57 +01:00
wh0
f29d8eeda8 Parse octal escape sequences 2019-06-25 18:35:25 -07:00
mshwed
daee7ac761 Merge branch 'master' of https://github.com/gchq/CyberChef into feature/advanced-entropy 2019-06-20 14:11:51 -04:00
mshwed
313d1a580e Merge branch 'master' of https://github.com/gchq/CyberChef into bugfix/json-to-csv 2019-06-20 13:59:08 -04:00
j433866
ca9bab5d4c 8.31.6 2019-06-14 14:32:15 +01:00
j433866
e35ef8f39b Escape HTML for error messages being sent to alert 2019-06-14 14:31:38 +01:00
George O
e709582062 Disappearing Popover Fix 2019-06-10 19:19:50 +01:00
George O
a6732ba815 Added Index of Coincidence Tests 2019-06-09 00:57:02 +01:00
George O
466d872d30 Added Index of Coincidence Operation 2019-06-09 00:57:02 +01:00
j433866
3cf7238106 8.31.5 2019-06-07 11:04:00 +01:00
j433866
f8d08cc5db Merge branch 'j433866-aes-gcm-fix' 2019-06-07 10:55:53 +01:00
j433866
c1bdca8df3 Change to use byte strings instead of byte array 2019-06-04 10:19:00 +01:00
j433866
2be2c83f67 8.31.4 2019-05-21 16:56:23 +01:00
j433866
a271eaabd0 Merge branch 'j433866-load-regex-fix' 2019-05-21 16:53:47 +01:00
h345983745
1d130c88a8 Inital commit 2019-05-19 15:40:17 +01:00
j433866
6f5018d45e Don't change option value if new value is blank 2019-05-16 11:59:25 +01:00
j433866
f51ee76c72 Merge pull request #3 from gchq/master
Bring up to date with gchq/master
2019-05-16 11:53:58 +01:00
j433866
0c9e8fe050 8.31.3 2019-05-09 17:07:12 +01:00
j433866
8f41571e47 Merge branch 'd98762625-use-node-lts' 2019-05-09 16:43:08 +01:00
j433866
6d14368e2f Merge branch 'use-node-lts' of https://github.com/d98762625/CyberChef into d98762625-use-node-lts 2019-05-09 16:42:19 +01:00
j433866
f90ad48906 Update dependencies 2019-05-09 16:20:42 +01:00
j433866
e95dac82c2 8.31.2 2019-05-09 11:56:27 +01:00
j433866
144601ffd4 Merge branch 'j433866-load-recipe-fix' 2019-05-09 11:54:18 +01:00
j433866
cbcc2aa731 Fix regex to handle multiple escaped backslashes 2019-05-09 10:04:06 +01:00
j433866
f9354c8cd1 Merge remote-tracking branch 'upstream/master' 2019-05-09 09:08:01 +01:00
d98762625
7a4f418e75 configure travis to use node LTS instead of latest 2019-04-29 17:10:40 +01:00
mshwed
8fa8e34027 Added support for parsing JSON with number type values. Added support for non-array JSON objects. Added extra tests for JSON to CSV operation. 2019-04-28 16:29:15 -04:00
mshwed
5225874498 Fixed handling of large files and fixed issue with line histogram colour fill 2019-04-28 14:38:03 -04:00
mshwed
802493fec4 Merge branch 'master' of https://github.com/gchq/CyberChef into feature/advanced-entropy 2019-04-24 19:48:03 -04:00
n1474335
01f0625d6a Fixed XSS in 'Text Encoding Brute Force. Closes #539 2019-04-14 22:00:17 +01:00
n1474335
38ff7ec89f 8.31.1 2019-04-14 21:56:01 +01:00
n1474335
7163a0802d Tidied up build directory 2019-04-14 21:55:52 +01:00
n1474335
8d0fcf37c5 8.31.0 2019-04-12 18:58:48 +01:00
n1474335
3da5a8bb34 Merge branch 'downloadable-zip' 2019-04-12 18:58:36 +01:00
n1474335
fad33b583b Updated CHANGELOG 2019-04-12 18:58:22 +01:00
n1474335
8f450501cc Downloadble version is now a .zip file instead of a single .htm file 2019-04-12 18:54:31 +01:00
n1474335
b3ae0e577a 8.30.1 2019-04-12 16:25:47 +01:00
n1474335
aedac94e40 Merge branch 'd98762625-init-dish' 2019-04-12 16:25:38 +01:00
n1474335
08c5dbce09 Merge branch 'init-dish' of https://github.com/d98762625/CyberChef into d98762625-init-dish 2019-04-12 16:24:23 +01:00
n1474335
482d658de7 8.30.0 2019-04-12 16:20:00 +01:00
n1474335
39e34081fc Updated CHANGELOG 2019-04-12 16:18:25 +01:00
n1474335
5797786a75 Merge branch 'feature-protobuf' 2019-04-12 16:16:03 +01:00
n1474335
f6977ea264 Added test for 'Decode Protobuf' operation 2019-04-12 16:13:10 +01:00
d98762625
cdc15c0f20 change empty Dish initialisation to use ArrayBuffer 2019-04-08 17:58:46 +01:00
Matt
18408901be removed old bzip2 dependency 2019-04-07 19:11:46 +01:00
Matt
982c915931 Change author 2019-04-07 19:02:27 +01:00
Matt
a339eacd45 Bzip2 compression support changed to use wasm backend
x4 speed.
2019-04-07 18:59:03 +01:00
mshwed
8fc0e012e3 Fixed formatting issues 2019-04-06 23:09:46 -04:00
mshwed
b7fb9635e5 Added operation for entropy visualization as an image 2019-04-06 19:40:07 -04:00
mshwed
f988a958bb Added support for generating an entropy curve based on the input data 2019-04-06 15:59:36 -04:00
mshwed
c80cb57b07 Added histogram line, refactored axes generation 2019-04-05 14:30:24 -04:00
mshwed
dec28e16d4 Added histogram visualization for text entropy 2019-04-05 11:12:44 -04:00
n1474335
525cb0689f Added 'Protobuf Decode', 'VarInt Decode' and 'VarInt Encode' operations 2019-04-02 17:27:14 +01:00
Matt
7796c473ae Fix lint issue 2019-04-02 17:01:47 +01:00
Matt
8445165491 Use all the arraybuffers
cuts a solid 1/3 off the compression time
2019-04-02 16:47:38 +01:00
Matt
c5698fcd65 Merge remote-tracking branch 'upstream/master' into features/bz2-comp 2019-04-02 16:26:29 +01:00
n1474335
786e50c3c3 8.29.1 2019-04-02 15:34:35 +01:00
n1474335
7d03be3a77 Dish._translate now uses ArrayBuffer as its intermediate type instead of byteArray. This should speed up operations on large files. 2019-04-02 15:34:30 +01:00
Matt
8b12caad78 Merge gchq/master into bz2-comp 2019-04-02 12:08:30 +01:00
Matt
e1492c3bb1 Added (non-garbage) description and fixed wikipedia link. 2019-04-02 12:05:17 +01:00
Matt
3cc66e9db9 Added Bzip2 compression support 2019-04-02 11:55:59 +01:00
n1474335
27677adbe8 Fixed option default 2019-04-01 00:29:10 +01:00
n1474335
6fa06a4f8b 8.29.0 2019-03-31 22:42:16 +01:00
n1474335
a3be4d2945 Merge branch 'h345983745-blake2' 2019-03-31 22:41:13 +01:00
n1474335
3dc5b5c31a Changed input type for BLAKE hashing ops to ArrayBuffer and tidied 2019-03-31 22:40:54 +01:00
n1474335
342e11f83e Merge branch 'blake2' of https://github.com/h345983745/CyberChef into h345983745-blake2 2019-03-31 22:25:40 +01:00
n1474335
b6d78b4001 8.28.0 2019-03-31 21:47:24 +01:00
n1474335
2aaa6db538 Updated CHANGELOG 2019-03-31 21:47:18 +01:00
n1474335
fc909d8199 Fix lint 2019-03-31 21:43:20 +01:00
n1474335
262136393b Merge branch 'artemisbot-module-charts' 2019-03-31 21:41:32 +01:00
n1474335
1640859542 Tidied up charts operations 2019-03-31 21:40:54 +01:00
n1474335
f0b48acaf9 Merge branch 'master' into artemisbot-module-charts 2019-03-31 21:21:08 +01:00
h345983745
bb8c305fc8 Updated description 2019-03-31 18:02:04 +00:00
h345983745
3950dba2c5 Added Key Option 2019-03-31 18:02:04 +00:00
h345983745
903ea45228 Update Generate All Hashes 2019-03-31 18:02:03 +00:00
h345983745
b116b8ba1e Added Output Encoding Option 2019-03-31 18:02:03 +00:00
h345983745
908043fb7f Added to Categories and updated info URL's 2019-03-31 18:02:03 +00:00
h345983745
2d7e3f180e Added to "Generate All Hashes" operation 2019-03-31 18:02:03 +00:00
h345983745
c813d17595 Fixed Author Tags 2019-03-31 18:02:03 +00:00
h345983745
7d16265c4e Initial Commit – Working hash functionality 2019-03-31 18:02:02 +00:00
n1474335
4fb4764d3f 8.27.2 2019-03-30 14:56:53 +00:00
n1474335
2385f1cbf8 Updated dependencies 2019-03-30 14:56:43 +00:00
n1474335
3d80d66925 Merge branch 'module-charts' of https://github.com/artemisbot/CyberChef into artemisbot-module-charts 2019-03-27 23:17:30 +00:00
j433866
328c0ade22 Merge remote-tracking branch 'upstream/master' 2019-03-19 14:21:28 +00:00
Matt
b3d92b04cb Updated nodom dependency to upstream 2019-03-19 11:24:29 +00:00
Matt
3ad5f889a0 Wrote some tests, fixed imports for node 2019-03-14 13:37:11 +00:00
Matt
768fef502d Changed version of nodom to actually functioning fork 2019-03-14 11:39:46 +00:00
j433866
0a7a0ac681 Merge pull request #1 from gchq/master
bring up to date with master
2019-03-12 13:27:24 +00:00
Matt
cd22985f11 Fix categories JSON issue 2019-03-11 12:09:29 +00:00
Matt
66c0425080 Merge branch 'master' into module-charts 2019-03-11 11:57:19 +00:00
Matt
fd7fd9ca35 Remove jsdom from dependencies 2019-03-11 11:55:44 +00:00
Matt
ca6d472e5d Update nodom 2019-03-10 16:07:14 +00:00
Matt
6501454424 Cleanup 2019-03-10 13:08:47 +00:00
Matt
0019a4e1db Found a different dom implementation that removes 6MB 2019-03-10 12:03:53 +00:00
Matt
f8874fc586 Actually made operations work (and made the module 8MB)
Unfortunately they need jsdom
2019-03-10 11:44:02 +00:00
Matt
4ae875601a Ported final two chart operations 2019-03-10 10:33:47 +00:00
Matt
da2d5674a5 Ported heatmap and hex density chart ops 2019-02-23 00:41:19 +00:00
Matt
5bb8eb22ec Merge branch 'master' into module-charts 2019-02-22 23:36:14 +00:00
toby
6784a1c027 Add Series chart operation 2017-06-20 15:25:16 -04:00
toby
39ab600887 Add scatter plot operation 2017-06-06 14:08:21 -04:00
toby
49ea532cdc Tweak extent of hex density charts 2017-06-06 09:46:46 -04:00
toby
247e9bfbde Add "HTML to Text" operation 2017-06-05 21:47:32 -04:00
toby
5944568565 Change margins in hex density chart 2017-06-05 10:24:15 -04:00
toby
1c87707a76 Add heatmap chart operation 2017-06-05 10:24:06 -04:00
toby
b4188db671 Hexagon density: allow dense plotting of hexagons 2017-05-31 15:00:57 -04:00
toby
dc642be1f5 Hex plot: add edge drawing & changing colour opts 2017-05-30 15:49:22 -04:00
toby
6cdc7d3966 Hex density: split radius into draw & pack radii 2017-05-30 15:24:23 -04:00
toby
281d558111 Add hex density chart 2017-05-30 14:53:32 -04:00
toby
fa89713f19 Add d3 as a dependency 2017-05-30 14:50:56 -04:00
59 changed files with 6539 additions and 4307 deletions


@@ -1,14 +1 @@
<!-- Prefix the title above with one of the following: -->
<!-- Bug report: -->
<!-- Operation request: -->
<!-- Feature request: -->
<!-- Misc: -->
### Summary
### Example
<!-- If describing a bug, tell us what happens instead of the expected behavior -->
<!-- Include a link that triggers the bug if possible -->
<!-- If you are requesting a new operation, include example input and output -->
<!-- Prefix the title above with 'Misc:' -->

.github/ISSUE_TEMPLATE/bug_report.md (new file, 35 lines)

@@ -0,0 +1,35 @@
---
name: Bug report
about: Create a report to help us improve
title: 'Bug report: <Insert title here>'
labels: bug
assignees: ''
---
<!-- Prefix the title above with 'Bug report:' -->
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior or a link to the recipe / input used to cause the bug:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (if relevant, please complete the following information):**
- OS: [e.g. Windows]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.


@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for the project
title: 'Feature request: <Insert title here>'
labels: feature
assignees: ''
---
<!-- Prefix the title above with 'Feature request:' -->
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.


@@ -0,0 +1,16 @@
---
name: Operation request
about: Suggest a new operation
title: 'Operation request: <Insert title here>'
labels: operation
assignees: ''
---
<!-- Prefix the title above with 'Operation request:' -->
## Summary
### Example Input
### Example Output


@@ -1,6 +1,6 @@
language: node_js
node_js:
- node
- lts/*
addons:
chrome: stable
install: npm install
@@ -30,8 +30,9 @@ deploy:
skip_cleanup: true
api_key:
secure: "HV1WSKv4l/0Y2bKKs1iBJocBcmLj08PCRUeEM/jTwA4jqJ8EiLHWiXtER/D5sEg2iibRVKd2OQjfrmS6bo4AiwdeVgAKmv0FtS2Jw+391N8Nd5AkEANHa5Om/IpHLTL2YRAjpJTsDpY72bMUTJIwjQA3TFJkgrpOw6KYfohOcgbxLpZ4XuNJRU3VL4Hsxdv5V9aOVmfFOmMOVPQlakXy7NgtW5POp1f2WJwgcZxylkR1CjwaqMyXmSoVl46pyH3tr5+dptsQoKSGdi6sIHGA60oDotFPcm+0ifa47wZw+vapuuDi4tdNxhrHGaDMG8xiE0WFDHwQUDlk2/+W7j9SEX0H3Em7us371JXRp56EDwEcDa34VpVkC6i8HGcHK55hnxVbMZXGf3qhOFD8wY7qMbjMRvIpucrMHBi86OfkDfv0vDj2LyvIl5APj/AX50BrE0tfH1MZbH26Jkx4NdlkcxQ14GumarmUqfmVvbX/fsoA6oUuAAE9ZgRRi3KHO4wci6KUcRfdm+XOeUkaBFsL86G3EEYIvrtBTuaypdz+Cx7nd1iPZyWMx5Y1gXnVzha4nBdV4+7l9JIsFggD8QVpw2uHXQiS1KXFjOeqA3DBD8tjMB7q26Fl2fD3jkOo4BTbQ2NrRIZUu/iL+fOmMPsyMt2qulB0yaSBCfkbEq8xrUA="
file_glob: true
file:
- build/prod/cyberchef.htm
- build/prod/*.zip
- build/node/CyberChef.js
on:
repo: gchq/CyberChef


@@ -2,6 +2,28 @@
All major and minor version changes will be documented in this file. Details of patch-level version changes can be found in [commit messages](https://github.com/gchq/CyberChef/commits/master).
### [8.34.0] - 2019-06-28
- Various new visualisations added to the 'Entropy' operation [@MShwed] | [#535]
- Efficiency improvements made to the 'Entropy' operation for large file support [@n1474335]
### [8.33.0] - 2019-06-27
- 'Bzip2 Compress' operation added and 'Bzip2 Decompress' operation greatly improved [@artemisbot] | [#531]
### [8.32.0] - 2019-06-27
- 'Index of Coincidence' operation added [@Ge0rg3] | [#571]
### [8.31.0] - 2019-04-12
- The downloadable version of CyberChef is now a .zip file containing separate modules rather than a single .htm file. It is still completely standalone and will not make any external network requests. This change reduces the complexity of the build process significantly. [@n1474335]
### [8.30.0] - 2019-04-12
- 'Decode Protobuf' operation added [@n1474335] | [#533]
### [8.29.0] - 2019-03-31
- 'BLAKE2s' and 'BLAKE2b' hashing operations added [@h345983745] | [#525]
### [8.28.0] - 2019-03-31
- 'Heatmap Chart', 'Hex Density Chart', 'Scatter Chart' and 'Series Chart' operation added [@artemisbot] [@tlwr] | [#496] [#143]
### [8.27.0] - 2019-03-14
- 'Enigma', 'Typex', 'Bombe' and 'Multiple Bombe' operations added [@s2224834] | [#516]
- See [this wiki article](https://github.com/gchq/CyberChef/wiki/Enigma,-the-Bombe,-and-Typex) for a full explanation of these operations.
@@ -118,6 +140,13 @@ All major and minor version changes will be documented in this file. Details of
[8.34.0]: https://github.com/gchq/CyberChef/releases/tag/v8.34.0
[8.33.0]: https://github.com/gchq/CyberChef/releases/tag/v8.33.0
[8.32.0]: https://github.com/gchq/CyberChef/releases/tag/v8.32.0
[8.31.0]: https://github.com/gchq/CyberChef/releases/tag/v8.31.0
[8.30.0]: https://github.com/gchq/CyberChef/releases/tag/v8.30.0
[8.29.0]: https://github.com/gchq/CyberChef/releases/tag/v8.29.0
[8.28.0]: https://github.com/gchq/CyberChef/releases/tag/v8.28.0
[8.27.0]: https://github.com/gchq/CyberChef/releases/tag/v8.27.0
[8.26.0]: https://github.com/gchq/CyberChef/releases/tag/v8.26.0
[8.25.0]: https://github.com/gchq/CyberChef/releases/tag/v8.25.0
@@ -159,6 +188,7 @@ All major and minor version changes will be documented in this file. Details of
[@h345983745]: https://github.com/h345983745
[@s2224834]: https://github.com/s2224834
[@artemisbot]: https://github.com/artemisbot
[@tlwr]: https://github.com/tlwr
[@picapi]: https://github.com/picapi
[@Dachande663]: https://github.com/Dachande663
[@JustAnotherMark]: https://github.com/JustAnotherMark
@@ -172,9 +202,12 @@ All major and minor version changes will be documented in this file. Details of
[@Cynser]: https://github.com/Cynser
[@anthony-arnold]: https://github.com/anthony-arnold
[@masq]: https://github.com/masq
[@Ge0rg3]: https://github.com/Ge0rg3
[@MShwed]: https://github.com/MShwed
[#95]: https://github.com/gchq/CyberChef/pull/299
[#173]: https://github.com/gchq/CyberChef/pull/173
[#143]: https://github.com/gchq/CyberChef/pull/143
[#224]: https://github.com/gchq/CyberChef/pull/224
[#239]: https://github.com/gchq/CyberChef/pull/239
[#248]: https://github.com/gchq/CyberChef/pull/248
@@ -209,5 +242,11 @@ All major and minor version changes will be documented in this file. Details of
[#468]: https://github.com/gchq/CyberChef/pull/468
[#476]: https://github.com/gchq/CyberChef/pull/476
[#489]: https://github.com/gchq/CyberChef/pull/489
[#496]: https://github.com/gchq/CyberChef/pull/496
[#506]: https://github.com/gchq/CyberChef/pull/506
[#516]: https://github.com/gchq/CyberChef/pull/516
[#525]: https://github.com/gchq/CyberChef/pull/525
[#531]: https://github.com/gchq/CyberChef/pull/531
[#533]: https://github.com/gchq/CyberChef/pull/533
[#535]: https://github.com/gchq/CyberChef/pull/535
[#571]: https://github.com/gchq/CyberChef/pull/571


@@ -4,7 +4,6 @@ const webpack = require("webpack");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const BundleAnalyzerPlugin = require("webpack-bundle-analyzer").BundleAnalyzerPlugin;
const NodeExternals = require("webpack-node-externals");
const Inliner = require("web-resource-inliner");
const glob = require("glob");
const path = require("path");
@@ -43,18 +42,16 @@ module.exports = function (grunt) {
grunt.registerTask("prod",
"Creates a production-ready build. Use the --msg flag to add a compile message.",
["eslint", "clean:prod", "clean:config", "exec:generateConfig", "webpack:web", "inline", "chmod"]);
[
"eslint", "clean:prod", "clean:config", "exec:generateConfig", "webpack:web",
"copy:standalone", "zip:standalone", "clean:standalone", "chmod"
]);
grunt.registerTask("default",
"Lints the code base",
["eslint", "exec:repoSize"]);
grunt.registerTask("inline",
"Compiles a production build of CyberChef into a single, portable web page.",
["exec:generateConfig", "webpack:webInline", "runInliner", "clean:inlineScripts"]);
grunt.registerTask("runInliner", runInliner);
grunt.registerTask("doc", "docs");
grunt.registerTask("tests", "test");
grunt.registerTask("lint", "eslint");
@@ -72,6 +69,7 @@ module.exports = function (grunt) {
grunt.loadNpmTasks("grunt-accessibility");
grunt.loadNpmTasks("grunt-concurrent");
grunt.loadNpmTasks("grunt-contrib-connect");
grunt.loadNpmTasks("grunt-zip");
// Project configuration
@@ -94,32 +92,6 @@ module.exports = function (grunt) {
},
moduleEntryPoints = listEntryModules();
/**
* Compiles a production build of CyberChef into a single, portable web page.
*/
function runInliner() {
const done = this.async();
Inliner.html({
relativeTo: "build/prod/",
fileContent: grunt.file.read("build/prod/cyberchef.htm"),
images: true,
svgs: true,
scripts: true,
links: true,
strict: true
}, function(error, result) {
if (error) {
if (error instanceof Error) {
done(error);
} else {
done(new Error(error));
}
} else {
grunt.file.write("build/prod/cyberchef.htm", result);
done(true);
}
});
}
/**
* Generates an entry list for all the modules.
@@ -130,7 +102,7 @@ module.exports = function (grunt) {
glob.sync("./src/core/config/modules/*.mjs").forEach(file => {
const basename = path.basename(file);
if (basename !== "Default.mjs" && basename !== "OpModules.mjs")
entryModules[basename.split(".mjs")[0]] = path.resolve(file);
entryModules["modules/" + basename.split(".mjs")[0]] = path.resolve(file);
});
return entryModules;
@@ -143,7 +115,7 @@ module.exports = function (grunt) {
node: ["build/node/*"],
config: ["src/core/config/OperationConfig.json", "src/core/config/modules/*", "src/code/operations/index.mjs"],
docs: ["docs/*", "!docs/*.conf.json", "!docs/*.ico", "!docs/*.png"],
inlineScripts: ["build/prod/scripts.js"],
standalone: ["build/prod/CyberChef*.html"]
},
eslint: {
options: {
@@ -195,6 +167,9 @@ module.exports = function (grunt) {
}, moduleEntryPoints),
output: {
path: __dirname + "/build/prod",
filename: chunkData => {
return chunkData.chunk.name === "main" ? "assets/[name].js": "[name].js";
},
globalObject: "this"
},
resolve: {
@@ -225,33 +200,6 @@ module.exports = function (grunt) {
]
};
},
webInline: {
mode: "production",
target: "web",
entry: "./src/web/index.js",
output: {
filename: "scripts.js",
path: __dirname + "/build/prod"
},
plugins: [
new webpack.DefinePlugin(Object.assign({}, BUILD_CONSTANTS, {
INLINE: "true"
})),
new HtmlWebpackPlugin({
filename: "cyberchef.htm",
template: "./src/web/html/index.html",
compileTime: compileTime,
version: pkg.version + "s",
inline: true,
minify: {
removeComments: true,
collapseWhitespace: true,
minifyJS: true,
minifyCSS: true
}
}),
]
},
node: {
mode: "production",
target: "node",
@@ -284,7 +232,8 @@ module.exports = function (grunt) {
warningsFilter: [
/source-map/,
/dependency is an expression/,
/export 'default'/
/export 'default'/,
/Can't resolve 'sodium'/
],
}
},
@@ -316,6 +265,18 @@ module.exports = function (grunt) {
}
}
},
zip: {
standalone: {
cwd: "build/prod/",
src: [
"build/prod/**/*",
"!build/prod/index.html",
"!build/prod/BundleAnalyzerReport.html",
"!build/prod/sitemap.js"
],
dest: `build/prod/CyberChef_v${pkg.version}.zip`
}
},
connect: {
prod: {
options: {
@@ -328,10 +289,16 @@ module.exports = function (grunt) {
ghPages: {
options: {
process: function (content, srcpath) {
// Add Google Analytics code to index.html
if (srcpath.indexOf("index.html") >= 0) {
// Add Google Analytics code to index.html
content = content.replace("</body></html>",
grunt.file.read("src/web/static/ga.html") + "</body></html>");
// Add Structured Data for SEO
content = content.replace("</head>",
"<script type='application/ld+json'>" +
JSON.stringify(JSON.parse(grunt.file.read("src/web/static/structuredData.json"))) +
"</script></head>");
return grunt.template.process(content, srcpath);
} else {
return content;
@@ -350,6 +317,28 @@ module.exports = function (grunt) {
dest: "build/prod/"
},
]
},
standalone: {
options: {
process: function (content, srcpath) {
if (srcpath.indexOf("index.html") >= 0) {
// Replace download link with version number
content = content.replace(/<a [^>]+>Download CyberChef.+?<\/a>/,
`<span>Version ${pkg.version}</span>`);
return grunt.template.process(content, srcpath);
} else {
return content;
}
},
noProcess: ["**", "!**/*.html"]
},
files: [
{
src: "build/prod/index.html",
dest: `build/prod/CyberChef_v${pkg.version}.html`
}
]
}
},
chmod: {
@@ -405,7 +394,7 @@ module.exports = function (grunt) {
command: "node --experimental-modules --no-warnings --no-deprecation tests/operations/index.mjs"
},
browserTests: {
command: "./node_modules/.bin/nightwatch --env prod,inline"
command: "./node_modules/.bin/nightwatch --env prod"
}
},
});


@@ -11,14 +11,22 @@ module.exports = function(api) {
"node": "6.5"
},
"modules": false,
"useBuiltIns": "entry"
"useBuiltIns": "entry",
"corejs": 3
}]
],
"plugins": [
"babel-plugin-syntax-dynamic-import",
["babel-plugin-transform-builtin-extend", {
"globals": ["Error"]
}]
[
"babel-plugin-transform-builtin-extend", {
"globals": ["Error"]
}
],
[
"@babel/plugin-transform-runtime", {
"regenerator": true
}
]
]
};
};


@@ -23,10 +23,6 @@
"prod": {
"launch_url": "http://localhost:8000/index.html"
},
"inline": {
"launch_url": "http://localhost:8000/cyberchef.htm"
}
}

package-lock.json (generated; 7163 changed lines): diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"name": "cyberchef",
"version": "8.27.1",
"version": "8.34.0",
"description": "The Cyber Swiss Army Knife for encryption, encoding, compression and data analysis.",
"author": "n1474335 <n1474335@gmail.com>",
"homepage": "https://gchq.github.io/CyberChef",
@@ -29,108 +29,122 @@
},
"main": "build/node/CyberChef.js",
"bugs": "https://github.com/gchq/CyberChef/issues",
"browserslist": [
"Chrome >= 40",
"Firefox >= 35",
"Edge >= 14",
"node >= 6.5"
],
"devDependencies": {
"@babel/core": "^7.2.2",
"@babel/preset-env": "^7.2.3",
"autoprefixer": "^9.4.3",
"babel-eslint": "^10.0.1",
"babel-loader": "^8.0.4",
"@babel/core": "^7.4.5",
"@babel/plugin-transform-runtime": "^7.4.4",
"@babel/preset-env": "^7.4.5",
"autoprefixer": "^9.6.0",
"babel-eslint": "^10.0.2",
"babel-loader": "^8.0.6",
"babel-plugin-syntax-dynamic-import": "^6.18.0",
"bootstrap": "^4.2.1",
"chromedriver": "^2.45.0",
"chromedriver": "^75.0.0",
"colors": "^1.3.3",
"css-loader": "^2.1.0",
"eslint": "^5.12.1",
"css-loader": "^3.0.0",
"eslint": "^6.0.1",
"exports-loader": "^0.7.0",
"file-loader": "^3.0.1",
"grunt": "^1.0.3",
"file-loader": "^4.0.0",
"grunt": "^1.0.4",
"grunt-accessibility": "~6.0.0",
"grunt-chmod": "~1.1.1",
"grunt-concurrent": "^2.3.1",
"grunt-concurrent": "^3.0.0",
"grunt-contrib-clean": "~2.0.0",
"grunt-contrib-connect": "^2.0.0",
"grunt-contrib-copy": "~1.0.0",
"grunt-contrib-watch": "^1.1.0",
"grunt-eslint": "^21.0.0",
"grunt-eslint": "^21.1.0",
"grunt-exec": "~3.0.0",
"grunt-jsdoc": "^2.3.0",
"grunt-jsdoc": "^2.4.0",
"grunt-webpack": "^3.1.3",
"grunt-zip": "^0.18.2",
"html-webpack-plugin": "^3.2.0",
"imports-loader": "^0.8.0",
"ink-docstrap": "^1.3.2",
"jsdoc-babel": "^0.5.0",
"mini-css-extract-plugin": "^0.5.0",
"nightwatch": "^1.0.18",
"node-sass": "^4.11.0",
"postcss-css-variables": "^0.11.0",
"mini-css-extract-plugin": "^0.7.0",
"nightwatch": "^1.1.12",
"node-sass": "^4.12.0",
"postcss-css-variables": "^0.13.0",
"postcss-import": "^12.0.1",
"postcss-loader": "^3.0.0",
"prompt": "^1.0.0",
"sass-loader": "^7.1.0",
"sitemap": "^2.1.0",
"sitemap": "^2.2.0",
"style-loader": "^0.23.1",
"svg-url-loader": "^2.3.2",
"url-loader": "^1.1.2",
"web-resource-inliner": "^4.2.1",
"webpack": "^4.28.3",
"webpack-bundle-analyzer": "^3.0.3",
"webpack-dev-server": "^3.1.14",
"svg-url-loader": "^2.3.3",
"url-loader": "^2.0.1",
"webpack": "^4.35.0",
"webpack-bundle-analyzer": "^3.3.2",
"webpack-dev-server": "^3.7.2",
"webpack-node-externals": "^1.7.2",
"worker-loader": "^2.0.0"
},
"dependencies": {
"@babel/polyfill": "^7.4.4",
"@babel/runtime": "^7.4.5",
"arrive": "^2.4.1",
"babel-plugin-transform-builtin-extend": "1.1.2",
"babel-polyfill": "^6.26.0",
"bcryptjs": "^2.4.3",
"bignumber.js": "^8.0.2",
"bignumber.js": "^9.0.0",
"blakejs": "^1.1.0",
"bootstrap": "4.3.1",
"bootstrap-colorpicker": "^2.5.3",
"bootstrap-material-design": "^4.1.1",
"bson": "^4.0.1",
"bootstrap-material-design": "^4.1.2",
"bson": "^4.0.2",
"chi-squared": "^1.1.0",
"clippyjs": "0.0.3",
"core-js": "^3.1.4",
"crypto-api": "^0.8.3",
"crypto-js": "^3.1.9-1",
"ctph.js": "0.0.5",
"diff": "^3.5.0",
"d3": "^5.9.4",
"d3-hexbin": "^0.2.2",
"diff": "^4.0.1",
"es6-promisify": "^6.0.1",
"escodegen": "^1.11.0",
"escodegen": "^1.11.1",
"esmangle": "^1.0.1",
"esprima": "^4.0.1",
"exif-parser": "^0.1.12",
"file-saver": "^2.0.0",
"file-saver": "^2.0.2",
"geodesy": "^1.1.3",
"highlight.js": "^9.13.1",
"jimp": "^0.6.0",
"jquery": "^3.3.1",
"highlight.js": "^9.15.8",
"jimp": "^0.6.4",
"jquery": "3.4.1",
"js-crc": "^0.2.0",
"js-sha3": "^0.8.0",
"jsesc": "^2.5.2",
"jsonpath": "^1.0.0",
"jsonwebtoken": "^8.4.0",
"jsqr": "^1.1.1",
"jsonpath": "^1.0.2",
"jsonwebtoken": "^8.5.1",
"jsqr": "^1.2.0",
"jsrsasign": "8.0.12",
"kbpgp": "^2.0.82",
"kbpgp": "2.1.2",
"libbzip2-wasm": "0.0.4",
"libyara-wasm": "0.0.12",
"lodash": "^4.17.11",
"loglevel": "^1.6.1",
"loglevel": "^1.6.3",
"loglevel-message-prefix": "^3.0.0",
"moment": "^2.23.0",
"moment-timezone": "^0.5.23",
"moment": "^2.24.0",
"moment-timezone": "^0.5.25",
"ngeohash": "^0.6.3",
"node-forge": "^0.7.6",
"node-forge": "^0.8.5",
"node-md6": "^0.1.0",
"nodom": "^2.2.0",
"notepack.io": "^2.2.0",
"nwmatcher": "^1.4.4",
"otp": "^0.1.3",
"popper.js": "^1.14.6",
"popper.js": "^1.15.0",
"qr-image": "^3.2.0",
"scryptsy": "^2.0.0",
"scryptsy": "^2.1.0",
"snackbarjs": "^1.1.0",
"sortablejs": "^1.8.0-rc1",
"split.js": "^1.5.10",
"sortablejs": "^1.9.0",
"split.js": "^1.5.11",
"ssdeep.js": "0.0.2",
"ua-parser-js": "^0.7.19",
"ua-parser-js": "^0.7.20",
"utf8": "^3.0.0",
"vkbeautify": "^0.99.3",
"xmldom": "^0.1.27",


@@ -1,13 +1,7 @@
module.exports = {
plugins: [
require("postcss-import"),
require("autoprefixer")({
browsers: [
"Chrome >= 40",
"Firefox >= 35",
"Edge >= 14"
]
}),
require("autoprefixer"),
require("postcss-css-variables")({
preserve: true
}),


@@ -6,7 +6,6 @@
* @license Apache-2.0
*/
import "babel-polyfill";
import Chef from "./Chef";
import OperationConfig from "./config/OperationConfig.json";
import OpModules from "./config/modules/OpModules";
@@ -179,7 +178,7 @@ self.loadRequiredModules = function(recipeConfig) {
if (!OpModules.hasOwnProperty(module)) {
log.info(`Loading ${module} module`);
self.sendStatusMessage(`Loading ${module} module`);
self.importScripts(`${self.docURL}/${module}.js`);
self.importScripts(`${self.docURL}/modules/${module}.js`);
self.sendStatusMessage("");
}
});


@@ -21,8 +21,8 @@ class Dish {
* @param {Dish} [dish=null] - A dish to clone
*/
constructor(dish=null) {
this.value = [];
this.type = Dish.BYTE_ARRAY;
this.value = new ArrayBuffer(0);
this.type = Dish.ARRAY_BUFFER;
if (dish &&
dish.hasOwnProperty("value") &&
@@ -149,78 +149,75 @@ class Dish {
*/
async _translate(toType, notUTF8=false) {
log.debug(`Translating Dish from ${Dish.enumLookup(this.type)} to ${Dish.enumLookup(toType)}`);
const byteArrayToStr = notUTF8 ? Utils.byteArrayToChars : Utils.byteArrayToUtf8;
// Convert data to intermediate byteArray type
// Convert data to intermediate ArrayBuffer type
try {
switch (this.type) {
case Dish.STRING:
this.value = this.value ? Utils.strToByteArray(this.value) : [];
this.value = this.value ? Utils.strToArrayBuffer(this.value) : new ArrayBuffer;
break;
case Dish.NUMBER:
this.value = typeof this.value === "number" ? Utils.strToByteArray(this.value.toString()) : [];
this.value = typeof this.value === "number" ? Utils.strToArrayBuffer(this.value.toString()) : new ArrayBuffer;
break;
case Dish.HTML:
this.value = this.value ? Utils.strToByteArray(Utils.unescapeHtml(Utils.stripHtmlTags(this.value, true))) : [];
this.value = this.value ? Utils.strToArrayBuffer(Utils.unescapeHtml(Utils.stripHtmlTags(this.value, true))) : new ArrayBuffer;
break;
case Dish.ARRAY_BUFFER:
// Array.from() would be nicer here, but it's slightly slower
this.value = Array.prototype.slice.call(new Uint8Array(this.value));
case Dish.BYTE_ARRAY:
this.value = new Uint8Array(this.value).buffer;
break;
case Dish.BIG_NUMBER:
this.value = BigNumber.isBigNumber(this.value) ? Utils.strToByteArray(this.value.toFixed()) : [];
this.value = BigNumber.isBigNumber(this.value) ? Utils.strToArrayBuffer(this.value.toFixed()) : new ArrayBuffer;
break;
case Dish.JSON:
this.value = this.value ? Utils.strToByteArray(JSON.stringify(this.value, null, 4)) : [];
this.value = this.value ? Utils.strToArrayBuffer(JSON.stringify(this.value, null, 4)) : new ArrayBuffer;
break;
case Dish.FILE:
this.value = await Utils.readFile(this.value);
this.value = Array.prototype.slice.call(this.value);
this.value = (await Utils.readFile(this.value)).buffer;
break;
case Dish.LIST_FILE:
this.value = await Promise.all(this.value.map(async f => Utils.readFile(f)));
this.value = this.value.map(b => Array.prototype.slice.call(b));
this.value = [].concat.apply([], this.value);
this.value = concatenateTypedArrays(...this.value).buffer;
break;
default:
break;
}
} catch (err) {
throw new DishError(`Error translating from ${Dish.enumLookup(this.type)} to byteArray: ${err}`);
throw new DishError(`Error translating from ${Dish.enumLookup(this.type)} to ArrayBuffer: ${err}`);
}
this.type = Dish.BYTE_ARRAY;
this.type = Dish.ARRAY_BUFFER;
// Convert from byteArray to toType
// Convert from ArrayBuffer to toType
try {
switch (toType) {
case Dish.STRING:
case Dish.HTML:
this.value = this.value ? byteArrayToStr(this.value) : "";
this.value = this.value ? Utils.arrayBufferToStr(this.value, !notUTF8) : "";
this.type = Dish.STRING;
break;
case Dish.NUMBER:
this.value = this.value ? parseFloat(byteArrayToStr(this.value)) : 0;
this.value = this.value ? parseFloat(Utils.arrayBufferToStr(this.value, !notUTF8)) : 0;
this.type = Dish.NUMBER;
break;
case Dish.ARRAY_BUFFER:
this.value = new Uint8Array(this.value).buffer;
case Dish.BYTE_ARRAY:
this.value = Array.prototype.slice.call(new Uint8Array(this.value));
this.type = Dish.ARRAY_BUFFER;
break;
case Dish.BIG_NUMBER:
try {
this.value = new BigNumber(byteArrayToStr(this.value));
this.value = new BigNumber(Utils.arrayBufferToStr(this.value, !notUTF8));
} catch (err) {
this.value = new BigNumber(NaN);
}
this.type = Dish.BIG_NUMBER;
break;
case Dish.JSON:
this.value = JSON.parse(byteArrayToStr(this.value));
this.value = JSON.parse(Utils.arrayBufferToStr(this.value, !notUTF8));
this.type = Dish.JSON;
break;
case Dish.FILE:
this.value = new File(this.value, "unknown");
this.type = Dish.FILE;
break;
case Dish.LIST_FILE:
this.value = [new File(this.value, "unknown")];
@@ -230,7 +227,7 @@ class Dish {
break;
}
} catch (err) {
throw new DishError(`Error translating from byteArray to ${Dish.enumLookup(toType)}: ${err}`);
throw new DishError(`Error translating from ArrayBuffer to ${Dish.enumLookup(toType)}: ${err}`);
}
}
@@ -374,6 +371,26 @@ class Dish {
}
/**
* Concatenates a list of Uint8Arrays together
*
* @param {Uint8Array[]} arrays
* @returns {Uint8Array}
*/
function concatenateTypedArrays(...arrays) {
let totalLength = 0;
for (const arr of arrays) {
totalLength += arr.length;
}
const result = new Uint8Array(totalLength);
let offset = 0;
for (const arr of arrays) {
result.set(arr, offset);
offset += arr.length;
}
return result;
}
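// Illustrative call, traced from the helper above (not present in the source file):
// concatenateTypedArrays(Uint8Array.of(1, 2), Uint8Array.of(3, 4)) returns Uint8Array [1, 2, 3, 4];
// the LIST_FILE case uses it to join the individual file contents before taking .buffer.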
/**
* Dish data type enum for byte arrays.


@@ -201,11 +201,18 @@ class Utils {
* Utils.parseEscapedChars("\\n");
*/
static parseEscapedChars(str) {
return str.replace(/(\\)?\\([bfnrtv0'"]|x[\da-fA-F]{2}|u[\da-fA-F]{4}|u\{[\da-fA-F]{1,6}\})/g, function(m, a, b) {
return str.replace(/(\\)?\\([bfnrtv'"]|[0-3][0-7]{2}|[0-7]{1,2}|x[\da-fA-F]{2}|u[\da-fA-F]{4}|u\{[\da-fA-F]{1,6}\})/g, function(m, a, b) {
if (a === "\\") return "\\"+b;
switch (b[0]) {
case "0":
return "\0";
case "1":
case "2":
case "3":
case "4":
case "5":
case "6":
case "7":
return String.fromCharCode(parseInt(b, 8));
case "b":
return "\b";
case "t":
@@ -367,6 +374,61 @@ class Utils {
}
/**
* Converts a string to an ArrayBuffer.
* Treats the string as UTF-8 if any values are over 255.
*
* @param {string} str
* @returns {ArrayBuffer}
*
* @example
* // returns [72,101,108,108,111]
* Utils.strToArrayBuffer("Hello");
*
* // returns [228,189,160,229,165,189]
* Utils.strToArrayBuffer("你好");
*/
static strToArrayBuffer(str) {
const arr = new Uint8Array(str.length);
let i = str.length, b;
while (i--) {
b = str.charCodeAt(i);
arr[i] = b;
// If any of the bytes are over 255, read as UTF-8
if (b > 255) return Utils.strToUtf8ArrayBuffer(str);
}
return arr.buffer;
}
/**
* Converts a string to a UTF-8 ArrayBuffer.
*
* @param {string} str
* @returns {ArrayBuffer}
*
* @example
* // returns [72,101,108,108,111]
* Utils.strToUtf8ArrayBuffer("Hello");
*
* // returns [228,189,160,229,165,189]
* Utils.strToUtf8ArrayBuffer("你好");
*/
static strToUtf8ArrayBuffer(str) {
const utf8Str = utf8.encode(str);
if (str.length !== utf8Str.length) {
if (ENVIRONMENT_IS_WORKER()) {
self.setOption("attemptHighlight", false);
} else if (ENVIRONMENT_IS_WEB()) {
window.app.options.attemptHighlight = false;
}
}
return Utils.strToArrayBuffer(utf8Str);
}
/**
* Converts a string to a byte array.
* Treats the string as UTF-8 if any values are over 255.
@@ -459,7 +521,7 @@ class Utils {
/**
* Attempts to convert a byte array to a UTF-8 string.
*
* @param {byteArray} byteArray
* @param {byteArray|Uint8Array} byteArray
* @returns {string}
*
* @example
@@ -505,6 +567,7 @@ class Utils {
static byteArrayToChars(byteArray) {
if (!byteArray) return "";
let str = "";
// String concatenation appears to be faster than an array join
for (let i = 0; i < byteArray.length;) {
str += String.fromCharCode(byteArray[i++]);
}
@@ -524,8 +587,8 @@ class Utils {
* Utils.arrayBufferToStr(Uint8Array.from([104,101,108,108,111]).buffer);
*/
static arrayBufferToStr(arrayBuffer, utf8=true) {
const byteArray = Array.prototype.slice.call(new Uint8Array(arrayBuffer));
return utf8 ? Utils.byteArrayToUtf8(byteArray) : Utils.byteArrayToChars(byteArray);
const arr = new Uint8Array(arrayBuffer);
return utf8 ? Utils.byteArrayToUtf8(arr) : Utils.byteArrayToChars(arr);
}
@@ -780,7 +843,7 @@ class Utils {
args = m[2]
.replace(/"/g, '\\"') // Escape double quotes
.replace(/(^|,|{|:)'/g, '$1"') // Replace opening ' with "
.replace(/([^\\]|[^\\]\\\\)'(,|:|}|$)/g, '$1"$2') // Replace closing ' with "
.replace(/([^\\]|(?:\\\\)+)'(,|:|}|$)/g, '$1"$2') // Replace closing ' with "
.replace(/\\'/g, "'"); // Unescape single quotes
args = "[" + args + "]";
@@ -1027,6 +1090,7 @@ class Utils {
"Comma": ",",
"Semi-colon": ";",
"Colon": ":",
"Tab": "\t",
"Line feed": "\n",
"CRLF": "\r\n",
"Forward slash": "/",


@@ -169,6 +169,9 @@
"Parse URI",
"URL Encode",
"URL Decode",
"Protobuf Decode",
"VarInt Encode",
"VarInt Decode",
"Format MAC addresses",
"Change IP format",
"Group IP addresses",
@@ -297,6 +300,8 @@
"HAS-160",
"Whirlpool",
"Snefru",
"BLAKE2b",
"BLAKE2s",
"SSDEEP",
"CTPH",
"Compare SSDEEP hashes",
@@ -378,7 +383,11 @@
"Image Filter",
"Contain Image",
"Cover Image",
"Image Hue/Saturation/Lightness"
"Image Hue/Saturation/Lightness",
"Hex Density chart",
"Scatter chart",
"Series chart",
"Heatmap chart"
]
},
{
@@ -386,6 +395,7 @@
"ops": [
"Entropy",
"Frequency distribution",
"Index of Coincidence",
"Chi Square",
"Disassemble x86",
"Pseudo-Random Number Generator",
@@ -395,6 +405,7 @@
"Generate QR Code",
"Parse QR Code",
"Haversine distance",
"HTML To Text",
"Generate Lorem Ipsum",
"Numberwang",
"XKCD Random Number"

src/core/lib/Charts.mjs (new file, 178 lines)

@@ -0,0 +1,178 @@
/**
* @author tlwr [toby@toby.codes]
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import OperationError from "../errors/OperationError";
/**
* @constant
* @default
*/
export const RECORD_DELIMITER_OPTIONS = ["Line feed", "CRLF"];
/**
* @constant
* @default
*/
export const FIELD_DELIMITER_OPTIONS = ["Space", "Comma", "Semi-colon", "Colon", "Tab"];
/**
* Default from colour
*
* @constant
* @default
*/
export const COLOURS = {
min: "white",
max: "black"
};
/**
* Gets values from input for a plot.
*
* @param {string} input
* @param {string} recordDelimiter
* @param {string} fieldDelimiter
* @param {boolean} columnHeadingsAreIncluded - whether we should skip the first record
* @param {number} length
* @returns {Object[]}
*/
export function getValues(input, recordDelimiter, fieldDelimiter, columnHeadingsAreIncluded, length) {
let headings;
const values = [];
input
.split(recordDelimiter)
.forEach((row, rowIndex) => {
const split = row.split(fieldDelimiter);
if (split.length !== length) throw new OperationError(`Each row must have length ${length}.`);
if (columnHeadingsAreIncluded && rowIndex === 0) {
headings = split;
} else {
values.push(split);
}
});
return { headings, values };
}
/**
* Gets values from input for a scatter plot.
*
* @param {string} input
* @param {string} recordDelimiter
* @param {string} fieldDelimiter
* @param {boolean} columnHeadingsAreIncluded - whether we should skip the first record
* @returns {Object[]}
*/
export function getScatterValues(input, recordDelimiter, fieldDelimiter, columnHeadingsAreIncluded) {
let { headings, values } = getValues(
input,
recordDelimiter,
fieldDelimiter,
columnHeadingsAreIncluded,
2
);
if (headings) {
headings = {x: headings[0], y: headings[1]};
}
values = values.map(row => {
const x = parseFloat(row[0], 10),
y = parseFloat(row[1], 10);
if (Number.isNaN(x)) throw new OperationError("Values must be numbers in base 10.");
if (Number.isNaN(y)) throw new OperationError("Values must be numbers in base 10.");
return [x, y];
});
return { headings, values };
}
/**
* Gets values from input for a scatter plot with colour from the third column.
*
* @param {string} input
* @param {string} recordDelimiter
* @param {string} fieldDelimiter
* @param {boolean} columnHeadingsAreIncluded - whether we should skip the first record
* @returns {Object[]}
*/
export function getScatterValuesWithColour(input, recordDelimiter, fieldDelimiter, columnHeadingsAreIncluded) {
let { headings, values } = getValues(
input,
recordDelimiter, fieldDelimiter,
columnHeadingsAreIncluded,
3
);
if (headings) {
headings = {x: headings[0], y: headings[1]};
}
values = values.map(row => {
const x = parseFloat(row[0], 10),
y = parseFloat(row[1], 10),
colour = row[2];
if (Number.isNaN(x)) throw new OperationError("Values must be numbers in base 10.");
if (Number.isNaN(y)) throw new OperationError("Values must be numbers in base 10.");
return [x, y, colour];
});
return { headings, values };
}
/**
* Gets values from input for a time series plot.
*
* @param {string} input
* @param {string} recordDelimiter
* @param {string} fieldDelimiter
* @param {boolean} columnHeadingsAreIncluded - whether we should skip the first record
* @returns {Object[]}
*/
export function getSeriesValues(input, recordDelimiter, fieldDelimiter, columnHeadingsAreIncluded) {
const { values } = getValues(
input,
recordDelimiter, fieldDelimiter,
false,
3
);
let xValues = new Set();
const series = {};
values.forEach(row => {
const serie = row[0],
xVal = row[1],
val = parseFloat(row[2], 10);
if (Number.isNaN(val)) throw new OperationError("Values must be numbers in base 10.");
xValues.add(xVal);
if (typeof series[serie] === "undefined") series[serie] = {};
series[serie][xVal] = val;
});
xValues = new Array(...xValues);
const seriesList = [];
for (const seriesName in series) {
const serie = series[seriesName];
seriesList.push({name: seriesName, data: serie});
}
return { xValues, series: seriesList };
}
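A minimal usage sketch for the chart helpers above (the input string and delimiters are illustrative assumptions):

getScatterValues("time,value\n1,10\n2,20", "\n", ",", true);
// -> { headings: { x: "time", y: "value" }, values: [[1, 10], [2, 20]] }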

src/core/lib/Protobuf.mjs (new file, 285 lines)

@@ -0,0 +1,285 @@
import Utils from "../Utils";
/**
* Protobuf lib. Contains functions to decode protobuf serialised
* data without a schema or .proto file.
*
* Provides utility functions to encode and decode variable length
* integers (varint).
*
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
class Protobuf {
/**
* Protobuf constructor
*
* @param {byteArray} data
*/
constructor(data) {
// Check we have a byteArray
if (data instanceof Array) {
this.data = data;
} else {
throw new Error("Protobuf input must be a byteArray");
}
// Set up masks
this.TYPE = 0x07;
this.NUMBER = 0x78;
this.MSB = 0x80;
this.VALUE = 0x7f;
// Declare offset and length
this.offset = 0;
this.LENGTH = data.length;
}
// Public Functions
/**
* Encode a varint from a number
*
* @param {number} number
* @returns {byteArray}
*/
static varIntEncode(number) {
const MSB = 0x80,
VALUE = 0x7f,
MSBALL = ~VALUE,
INT = Math.pow(2, 31);
const out = [];
let offset = 0;
while (number >= INT) {
out[offset++] = (number & 0xff) | MSB;
number /= 128;
}
while (number & MSBALL) {
out[offset++] = (number & 0xff) | MSB;
number >>>= 7;
}
out[offset] = number | 0;
return out;
}
/**
* Decode a varint from the byteArray
*
* @param {byteArray} input
* @returns {number}
*/
static varIntDecode(input) {
const pb = new Protobuf(input);
return pb._varInt();
}
/**
* Parse Protobuf data
*
* @param {byteArray} input
* @returns {Object}
*/
static decode(input) {
const pb = new Protobuf(input);
return pb._parse();
}
// Private Class Functions
/**
* Main private parsing function
*
* @private
* @returns {Object}
*/
_parse() {
let object = {};
// Continue reading whilst we still have data
while (this.offset < this.LENGTH) {
const field = this._parseField();
object = this._addField(field, object);
}
// Throw an error if we have gone beyond the end of the data
if (this.offset > this.LENGTH) {
throw new Error("Exhausted Buffer");
}
return object;
}
/**
* Add a field read from the protobuf data into the Object. As
* protobuf fields can appear multiple times, if the field already
* exists we need to add the new field into an array of fields
* for that key.
*
* @private
* @param {Object} field
* @param {Object} object
* @returns {Object}
*/
_addField(field, object) {
// Get the field key/values
const key = field.key;
const value = field.value;
object[key] = object.hasOwnProperty(key) ?
object[key] instanceof Array ?
object[key].concat([value]) :
[object[key], value] :
value;
return object;
}
/**
* Parse a field and return the Object read from the record
*
* @private
* @returns {Object}
*/
_parseField() {
// Get the field headers
const header = this._fieldHeader();
const type = header.type;
const key = header.key;
switch (type) {
// varint
case 0:
return { "key": key, "value": this._varInt() };
// fixed 64
case 1:
return { "key": key, "value": this._uint64() };
// length delimited
case 2:
return { "key": key, "value": this._lenDelim() };
// fixed 32
case 5:
return { "key": key, "value": this._uint32() };
// unknown type
default:
throw new Error("Unknown type 0x" + type.toString(16));
}
}
/**
* Parse the field header and return the type and key
*
* @private
* @returns {Object}
*/
_fieldHeader() {
// Make sure we call type then number to preserve offset
return { "type": this._fieldType(), "key": this._fieldNumber() };
}
/**
* Parse the field type from the field header. Type is stored in the
* lower 3 bits of the tag byte. This does not move the offset on as
* we need to read the field number from the tag byte too.
*
* @private
* @returns {number}
*/
_fieldType() {
// Field type stored in lower 3 bits of tag byte
return this.data[this.offset] & this.TYPE;
}
/**
* Parse the field number (i.e. the key) from the field header. The
* field number is stored in the upper 5 bits of the tag byte - but
* is also varint encoded so the follow on bytes may need to be read
* when field numbers are > 15.
*
* @private
* @returns {number}
*/
_fieldNumber() {
let shift = -3;
let fieldNumber = 0;
do {
fieldNumber += shift < 28 ?
shift === -3 ?
(this.data[this.offset] & this.NUMBER) >> -shift :
(this.data[this.offset] & this.VALUE) << shift :
(this.data[this.offset] & this.VALUE) * Math.pow(2, shift);
shift += 7;
} while ((this.data[this.offset++] & this.MSB) === this.MSB);
return fieldNumber;
}
// Field Parsing Functions
/**
* Read off a varint from the data
*
* @private
* @returns {number}
*/
_varInt() {
let value = 0;
let shift = 0;
// Keep reading while upper bit set
do {
value += shift < 28 ?
(this.data[this.offset] & this.VALUE) << shift :
(this.data[this.offset] & this.VALUE) * Math.pow(2, shift);
shift += 7;
} while ((this.data[this.offset++] & this.MSB) === this.MSB);
return value;
}
/**
* Read off a 64 bit unsigned integer from the data
*
* @private
* @returns {number}
*/
_uint64() {
// Read off a Uint64
let num = this.data[this.offset++] * 0x1000000 + (this.data[this.offset++] << 16) + (this.data[this.offset++] << 8) + this.data[this.offset++];
num = num * 0x100000000 + this.data[this.offset++] * 0x1000000 + (this.data[this.offset++] << 16) + (this.data[this.offset++] << 8) + this.data[this.offset++];
return num;
}
/**
* Read off a length delimited field from the data
*
* @private
* @returns {Object|string}
*/
_lenDelim() {
// Read off the field length
const length = this._varInt();
const fieldBytes = this.data.slice(this.offset, this.offset + length);
let field;
try {
// Attempt to parse as a new Protobuf Object
const pbObject = new Protobuf(fieldBytes);
field = pbObject._parse();
} catch (err) {
// Otherwise treat as bytes
field = Utils.byteArrayToChars(fieldBytes);
}
// Move the offset and return the field
this.offset += length;
return field;
}
/**
* Read a 32 bit unsigned integer from the data
*
* @private
* @returns {number}
*/
_uint32() {
// Use a dataview to read off the integer
const dataview = new DataView(new Uint8Array(this.data.slice(this.offset, this.offset + 4)).buffer);
const value = dataview.getUint32(0);
this.offset += 4;
return value;
}
}
export default Protobuf;
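A small worked example of the varint helpers and tag-byte layout described above (values traced from the code shown):

Protobuf.varIntEncode(300);      // -> [172, 2], i.e. 0xAC 0x02 (low 7 bits first, MSB marks continuation)
Protobuf.varIntDecode([172, 2]); // -> 300
// A tag byte of 0x0A gives wire type 0x0A & 0x07 = 2 (length-delimited) and field number (0x0A & 0x78) >> 3 = 1.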


@@ -65,8 +65,8 @@ class AESEncrypt extends Operation {
* @throws {OperationError} if invalid key length
*/
run(input, args) {
const key = Utils.convertToByteArray(args[0].string, args[0].option),
iv = Utils.convertToByteArray(args[1].string, args[1].option),
const key = Utils.convertToByteString(args[0].string, args[0].option),
iv = Utils.convertToByteString(args[1].string, args[1].option),
mode = args[2],
inputType = args[3],
outputType = args[4];


@@ -0,0 +1,79 @@
/**
* @author h345983745
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import blakejs from "blakejs";
import OperationError from "../errors/OperationError";
import Utils from "../Utils";
import { toBase64 } from "../lib/Base64";
/**
* BLAKE2b operation
*/
class BLAKE2b extends Operation {
/**
* BLAKE2b constructor
*/
constructor() {
super();
this.name = "BLAKE2b";
this.module = "Hashing";
this.description = `Performs BLAKE2b hashing on the input.
<br><br> BLAKE2b is a flavour of the BLAKE cryptographic hash function that is optimized for 64-bit platforms and produces digests of any size between 1 and 64 bytes.
<br><br> Supports the use of an optional key.`;
this.infoURL = "https://wikipedia.org/wiki/BLAKE_(hash_function)#BLAKE2b_algorithm";
this.inputType = "ArrayBuffer";
this.outputType = "string";
this.args = [
{
"name": "Size",
"type": "option",
"value": ["512", "384", "256", "160", "128"]
}, {
"name": "Output Encoding",
"type": "option",
"value": ["Hex", "Base64", "Raw"]
}, {
"name": "Key",
"type": "toggleString",
"value": "",
"toggleValues": ["UTF8", "Decimal", "Base64", "Hex", "Latin1"]
}
];
}
/**
* @param {ArrayBuffer} input
* @param {Object[]} args
* @returns {string} The input having been hashed with BLAKE2b in the encoding format specified.
*/
run(input, args) {
const [outSize, outFormat] = args;
let key = Utils.convertToByteArray(args[2].string || "", args[2].option);
if (key.length === 0) {
key = null;
} else if (key.length > 64) {
throw new OperationError(["Key cannot be greater than 64 bytes", "It is currently " + key.length + " bytes."].join("\n"));
}
input = new Uint8Array(input);
switch (outFormat) {
case "Hex":
return blakejs.blake2bHex(input, key, outSize / 8);
case "Base64":
return toBase64(blakejs.blake2b(input, key, outSize / 8));
case "Raw":
return Utils.arrayBufferToStr(blakejs.blake2b(input, key, outSize / 8).buffer);
default:
return new OperationError("Unsupported Output Type");
}
}
}
export default BLAKE2b;


@@ -0,0 +1,80 @@
/**
* @author h345983745
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import blakejs from "blakejs";
import OperationError from "../errors/OperationError";
import Utils from "../Utils";
import { toBase64 } from "../lib/Base64";
/**
* BLAKE2s Operation
*/
class BLAKE2s extends Operation {
/**
* BLAKE2s constructor
*/
constructor() {
super();
this.name = "BLAKE2s";
this.module = "Hashing";
this.description = `Performs BLAKE2s hashing on the input.
<br><br>BLAKE2s is a flavour of the BLAKE cryptographic hash function that is optimized for 8- to 32-bit platforms and produces digests of any size between 1 and 32 bytes.
<br><br>Supports the use of an optional key.`;
this.infoURL = "https://wikipedia.org/wiki/BLAKE_(hash_function)#BLAKE2";
this.inputType = "ArrayBuffer";
this.outputType = "string";
this.args = [
{
"name": "Size",
"type": "option",
"value": ["256", "160", "128"]
}, {
"name": "Output Encoding",
"type": "option",
"value": ["Hex", "Base64", "Raw"]
},
{
"name": "Key",
"type": "toggleString",
"value": "",
"toggleValues": ["UTF8", "Decimal", "Base64", "Hex", "Latin1"]
}
];
}
/**
* @param {ArrayBuffer} input
* @param {Object[]} args
* @returns {string} The input having been hashed with BLAKE2s in the encoding format specified.
*/
run(input, args) {
const [outSize, outFormat] = args;
let key = Utils.convertToByteArray(args[2].string || "", args[2].option);
if (key.length === 0) {
key = null;
} else if (key.length > 32) {
throw new OperationError(["Key cannot be greater than 32 bytes", "It is currently " + key.length + " bytes."].join("\n"));
}
input = new Uint8Array(input);
switch (outFormat) {
case "Hex":
return blakejs.blake2sHex(input, key, outSize / 8);
case "Base64":
return toBase64(blakejs.blake2s(input, key, outSize / 8));
case "Raw":
return Utils.arrayBufferToStr(blakejs.blake2s(input, key, outSize / 8).buffer);
default:
return new OperationError("Unsupported Output Type");
}
}
}
export default BLAKE2s;


@@ -0,0 +1,72 @@
/**
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Bzip2 from "libbzip2-wasm";
/**
* Bzip2 Compress operation
*/
class Bzip2Compress extends Operation {
/**
* Bzip2Compress constructor
*/
constructor() {
super();
this.name = "Bzip2 Compress";
this.module = "Compression";
this.description = "Bzip2 is a compression library developed by Julian Seward (of GHC fame) that uses the Burrows-Wheeler algorithm. It only supports compressing single files and its compression is slow, however is more effective than Deflate (.gz & .zip).";
this.infoURL = "https://wikipedia.org/wiki/Bzip2";
this.inputType = "ArrayBuffer";
this.outputType = "ArrayBuffer";
this.args = [
{
name: "Block size (100s of kb)",
type: "number",
value: 9,
min: 1,
max: 9
},
{
name: "Work factor",
type: "number",
value: 30
}
];
}
/**
* @param {ArrayBuffer} input
* @param {Object[]} args
* @returns {File}
*/
run(input, args) {
const [blockSize, workFactor] = args;
if (input.byteLength <= 0) {
throw new OperationError("Please provide an input.");
}
if (ENVIRONMENT_IS_WORKER()) self.sendStatusMessage("Loading Bzip2...");
return new Promise((resolve, reject) => {
Bzip2().then(bzip2 => {
if (ENVIRONMENT_IS_WORKER()) self.sendStatusMessage("Compressing data...");
const inpArray = new Uint8Array(input);
const bzip2cc = bzip2.compressBZ2(inpArray, blockSize, workFactor);
if (bzip2cc.error !== 0) {
reject(new OperationError(bzip2cc.error_msg));
} else {
const output = bzip2cc.output;
resolve(output.buffer.slice(output.byteOffset, output.byteLength + output.byteOffset));
}
});
});
}
}
export default Bzip2Compress;


@@ -1,12 +1,12 @@
/**
* @author n1474335 [n1474335@gmail.com]
* @copyright Crown Copyright 2016
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import bzip2 from "../vendor/bzip2";
import OperationError from "../errors/OperationError";
import Bzip2 from "libbzip2-wasm";
/**
* Bzip2 Decompress operation
@@ -23,9 +23,15 @@ class Bzip2Decompress extends Operation {
this.module = "Compression";
this.description = "Decompresses data using the Bzip2 algorithm.";
this.infoURL = "https://wikipedia.org/wiki/Bzip2";
this.inputType = "byteArray";
this.outputType = "string";
this.args = [];
this.inputType = "ArrayBuffer";
this.outputType = "ArrayBuffer";
this.args = [
{
name: "Use low-memory, slower decompression algorithm",
type: "boolean",
value: false
}
];
this.patterns = [
{
"match": "^\\x42\\x5a\\x68",
@@ -41,14 +47,24 @@ class Bzip2Decompress extends Operation {
* @returns {ArrayBuffer}
*/
run(input, args) {
const compressed = new Uint8Array(input);
try {
const bzip2Reader = bzip2.array(compressed);
return bzip2.simple(bzip2Reader);
} catch (err) {
throw new OperationError(err);
const [small] = args;
if (input.byteLength <= 0) {
throw new OperationError("Please provide an input.");
}
if (ENVIRONMENT_IS_WORKER()) self.sendStatusMessage("Loading Bzip2...");
return new Promise((resolve, reject) => {
Bzip2().then(bzip2 => {
if (ENVIRONMENT_IS_WORKER()) self.sendStatusMessage("Decompressing data...");
const inpArray = new Uint8Array(input);
const bzip2cc = bzip2.decompressBZ2(inpArray, small ? 1 : 0);
if (bzip2cc.error !== 0) {
reject(new OperationError(bzip2cc.error_msg));
} else {
const output = bzip2cc.output;
resolve(output.buffer.slice(output.byteOffset, output.byteLength + output.byteOffset));
}
});
});
}
}

View File

@@ -4,8 +4,13 @@
* @license Apache-2.0
*/
import * as d3temp from "d3";
import * as nodomtemp from "nodom";
import Operation from "../Operation";
import Utils from "../Utils";
const d3 = d3temp.default ? d3temp.default : d3temp;
const nodom = nodomtemp.default ? nodomtemp.default: nodomtemp;
/**
* Entropy operation
@@ -19,30 +24,45 @@ class Entropy extends Operation {
super();
this.name = "Entropy";
this.module = "Default";
this.module = "Charts";
this.description = "Shannon Entropy, in the context of information theory, is a measure of the rate at which information is produced by a source of data. It can be used, in a broad sense, to detect whether data is likely to be structured or unstructured. 8 is the maximum, representing highly unstructured, 'random' data. English language text usually falls somewhere between 3.5 and 5. Properly encrypted or compressed data should have an entropy of over 7.5.";
this.infoURL = "https://wikipedia.org/wiki/Entropy_(information_theory)";
this.inputType = "byteArray";
this.outputType = "number";
this.inputType = "ArrayBuffer";
this.outputType = "json";
this.presentType = "html";
this.args = [];
this.args = [
{
"name": "Visualisation",
"type": "option",
"value": ["Shannon scale", "Histogram (Bar)", "Histogram (Line)", "Curve", "Image"]
}
];
}
/**
* @param {byteArray} input
* @param {Object[]} args
* Calculates the Shannon entropy of the input.
*
* @param {Uint8Array} input
* @returns {number}
*/
run(input, args) {
calculateShannonEntropy(input) {
const prob = [],
uniques = input.unique(),
str = Utils.byteArrayToChars(input);
let i;
occurrences = new Array(256).fill(0);
for (i = 0; i < uniques.length; i++) {
prob.push(str.count(Utils.chr(uniques[i])) / input.length);
// Count occurrences of each byte in the input
let i;
for (i = 0; i < input.length; i++) {
occurrences[input[i]]++;
}
// Store probability list
for (i = 0; i < occurrences.length; i++) {
if (occurrences[i] > 0) {
prob.push(occurrences[i] / input.length);
}
}
// Calculate Shannon entropy
let entropy = 0,
p;
@@ -54,44 +74,357 @@ class Entropy extends Operation {
return -entropy;
}
/**
* Calculates the scanning entropy of the input
*
* @param {Uint8Array} inputBytes
* @returns {Object}
*/
calculateScanningEntropy(inputBytes) {
const entropyData = [];
const binWidth = inputBytes.length < 256 ? 8 : 256;
for (let bytePos = 0; bytePos < inputBytes.length; bytePos += binWidth) {
const block = inputBytes.slice(bytePos, bytePos+binWidth);
entropyData.push(this.calculateShannonEntropy(block));
}
return { entropyData, binWidth };
}
/**
* Creates axes for the visualisation.
*
* @param {object} svg
* @param {function} xScale
* @param {function} yScale
* @param {integer} svgHeight
* @param {integer} svgWidth
* @param {object} margins
* @param {string} title
* @param {string} xTitle
* @param {string} yTitle
*/
createAxes(svg, xScale, yScale, svgHeight, svgWidth, margins, title, xTitle, yTitle) {
// Axes
const yAxis = d3.axisLeft()
.scale(yScale);
const xAxis = d3.axisBottom()
.scale(xScale);
svg.append("g")
.attr("transform", `translate(0, ${svgHeight - margins.bottom})`)
.call(xAxis);
svg.append("g")
.attr("transform", `translate(${margins.left},0)`)
.call(yAxis);
// Axes labels
svg.append("text")
.attr("transform", "rotate(-90)")
.attr("y", 0 - margins.left)
.attr("x", 0 - (svgHeight / 2))
.attr("dy", "1em")
.style("text-anchor", "middle")
.text(yTitle);
svg.append("text")
.attr("transform", `translate(${svgWidth / 2}, ${svgHeight - margins.bottom + 40})`)
.style("text-anchor", "middle")
.text(xTitle);
// Add title
svg.append("text")
.attr("transform", `translate(${svgWidth / 2}, ${margins.top - 10})`)
.style("text-anchor", "middle")
.text(title);
}
/**
* Calculates the frequency of bytes in the input.
*
* @param {Uint8Array} inputBytes
* @returns {number[]}
*/
calculateByteFrequency(inputBytes) {
const freq = new Array(256).fill(0);
if (inputBytes.length === 0) return freq;
// Count occurrences of each byte in the input
let i;
for (i = 0; i < inputBytes.length; i++) {
freq[inputBytes[i]]++;
}
for (i = 0; i < freq.length; i++) {
freq[i] = freq[i] / inputBytes.length;
}
return freq;
}
/**
* Creates a byte frequency line histogram.
*
* @param {number[]} byteFrequency
* @returns {HTML}
*/
createByteFrequencyLineHistogram(byteFrequency) {
const margins = { top: 30, right: 20, bottom: 50, left: 30 };
const svgWidth = 500,
svgHeight = 500;
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${svgWidth} ${svgHeight}`);
const yScale = d3.scaleLinear()
.domain([0, d3.max(byteFrequency, d => d)])
.range([svgHeight - margins.bottom, margins.top]);
const xScale = d3.scaleLinear()
.domain([0, byteFrequency.length - 1])
.range([margins.left, svgWidth - margins.right]);
const line = d3.line()
.x((_, i) => xScale(i))
.y(d => yScale(d))
.curve(d3.curveMonotoneX);
svg.append("path")
.datum(byteFrequency)
.attr("fill", "none")
.attr("stroke", "steelblue")
.attr("d", line);
this.createAxes(svg, xScale, yScale, svgHeight, svgWidth, margins, "", "Byte", "Byte Frequency");
return svg._groups[0][0].outerHTML;
}
/**
* Creates a byte frequency bar histogram.
*
* @param {number[]} byteFrequency
* @returns {HTML}
*/
createByteFrequencyBarHistogram(byteFrequency) {
const margins = { top: 30, right: 20, bottom: 50, left: 30 };
const svgWidth = 500,
svgHeight = 500,
binWidth = 1;
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${svgWidth} ${svgHeight}`);
const yExtent = d3.extent(byteFrequency, d => d);
const yScale = d3.scaleLinear()
.domain(yExtent)
.range([svgHeight - margins.bottom, margins.top]);
const xScale = d3.scaleLinear()
.domain([0, byteFrequency.length - 1])
.range([margins.left - binWidth, svgWidth - margins.right]);
svg.selectAll("rect")
.data(byteFrequency)
.enter().append("rect")
.attr("x", (_, i) => xScale(i) + binWidth)
.attr("y", dataPoint => yScale(dataPoint))
.attr("width", binWidth)
.attr("height", dataPoint => yScale(yExtent[0]) - yScale(dataPoint))
.attr("fill", "blue");
this.createAxes(svg, xScale, yScale, svgHeight, svgWidth, margins, "", "Byte", "Byte Frequency");
return svg._groups[0][0].outerHTML;
}
/**
* Creates a curve of the scanning entropy data.
*
* @param {number[]} entropyData
* @returns {HTML}
*/
createEntropyCurve(entropyData) {
const margins = { top: 30, right: 20, bottom: 50, left: 30 };
const svgWidth = 500,
svgHeight = 500;
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${svgWidth} ${svgHeight}`);
const yScale = d3.scaleLinear()
.domain([0, d3.max(entropyData, d => d)])
.range([svgHeight - margins.bottom, margins.top]);
const xScale = d3.scaleLinear()
.domain([0, entropyData.length])
.range([margins.left, svgWidth - margins.right]);
const line = d3.line()
.x((_, i) => xScale(i))
.y(d => yScale(d))
.curve(d3.curveMonotoneX);
if (entropyData.length > 0) {
svg.append("path")
.datum(entropyData)
.attr("d", line);
svg.selectAll("path").attr("fill", "none").attr("stroke", "steelblue");
}
this.createAxes(svg, xScale, yScale, svgHeight, svgWidth, margins, "Scanning Entropy", "Block", "Entropy");
return svg._groups[0][0].outerHTML;
}
/**
* Creates an image representation of the entropy
*
* @param {number[]} entropyData
* @returns {HTML}
*/
createEntropyImage(entropyData) {
const svgHeight = 100,
svgWidth = 100,
cellSize = 1,
nodes = [];
for (let i = 0; i < entropyData.length; i++) {
nodes.push({
x: i % svgWidth,
y: Math.floor(i / svgWidth),
entropy: entropyData[i]
});
}
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${svgWidth} ${svgHeight}`);
const greyScale = d3.scaleLinear()
.domain([0, d3.max(entropyData, d => d)])
.range(["#000000", "#FFFFFF"])
.interpolate(d3.interpolateRgb);
svg
.selectAll("rect")
.data(nodes)
.enter().append("rect")
.attr("x", d => d.x * cellSize)
.attr("y", d => d.y * cellSize)
.attr("width", cellSize)
.attr("height", cellSize)
.style("fill", d => greyScale(d.entropy));
return svg._groups[0][0].outerHTML;
}
/**
* Displays the entropy as a scale bar for web apps.
*
* @param {number} entropy
* @returns {html}
* @returns {HTML}
*/
present(entropy) {
createShannonEntropyVisualization(entropy) {
return `Shannon entropy: ${entropy}
<br><canvas id='chart-area'></canvas><br>
- 0 represents no randomness (i.e. all the bytes in the data have the same value) whereas 8, the maximum, represents a completely random string.
- Standard English text usually falls somewhere between 3.5 and 5.
- Properly encrypted or compressed data of a reasonable length should have an entropy of over 7.5.
The following results show the entropy of chunks of the input data. Chunks with particularly high entropy could suggest encrypted or compressed sections.
<br><script>
var canvas = document.getElementById("chart-area"),
parentRect = canvas.parentNode.getBoundingClientRect(),
entropy = ${entropy},
height = parentRect.height * 0.25;
canvas.width = parentRect.width * 0.95;
canvas.height = height > 150 ? 150 : height;
CanvasComponents.drawScaleBar(canvas, entropy, 8, [
{
label: "English text",
min: 3.5,
max: 5
},{
label: "Encrypted/compressed",
min: 7.5,
max: 8
}
]);
</script>`;
}
/**
* @param {ArrayBuffer} input
* @param {Object[]} args
* @returns {json}
*/
run(input, args) {
const visualizationType = args[0];
input = new Uint8Array(input);
switch (visualizationType) {
case "Histogram (Bar)":
case "Histogram (Line)":
return this.calculateByteFrequency(input);
case "Curve":
case "Image":
return this.calculateScanningEntropy(input).entropyData;
case "Shannon scale":
default:
return this.calculateShannonEntropy(input);
}
}
/**
* Displays the entropy in a visualisation for web apps.
*
* @param {json} entropyData
* @param {Object[]} args
* @returns {html}
*/
present(entropyData, args) {
const visualizationType = args[0];
switch (visualizationType) {
case "Histogram (Bar)":
return this.createByteFrequencyBarHistogram(entropyData);
case "Histogram (Line)":
return this.createByteFrequencyLineHistogram(entropyData);
case "Curve":
return this.createEntropyCurve(entropyData);
case "Image":
return this.createEntropyImage(entropyData);
case "Shannon scale":
default:
return this.createShannonEntropyVisualization(entropyData);
}
}
}
export default Entropy;
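As a sanity check on calculateShannonEntropy above, the same formula, H = -sum(p * log2(p)), applied by hand to a small buffer (illustrative only):

// [0x41, 0x41, 0x42, 0x42] contains two symbols, each with probability 0.5, so H = 1 bit/byte
const bytes = new Uint8Array([0x41, 0x41, 0x42, 0x42]);
const counts = new Array(256).fill(0);
bytes.forEach(b => counts[b]++);
const sumPLogP = counts
    .filter(c => c > 0)
    .map(c => c / bytes.length)
    .reduce((sum, p) => sum + p * Math.log2(p), 0);
console.log(-sumPLogP); // 1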

View File

@@ -28,6 +28,8 @@ import Fletcher64Checksum from "./Fletcher64Checksum";
import Adler32Checksum from "./Adler32Checksum";
import CRC16Checksum from "./CRC16Checksum";
import CRC32Checksum from "./CRC32Checksum";
import BLAKE2b from "./BLAKE2b";
import BLAKE2s from "./BLAKE2s";
/**
* Generate all hashes operation
@@ -86,6 +88,14 @@ class GenerateAllHashes extends Operation {
"\nWhirlpool-0: " + (new Whirlpool()).run(arrayBuffer, ["Whirlpool-0"]) +
"\nWhirlpool-T: " + (new Whirlpool()).run(arrayBuffer, ["Whirlpool-T"]) +
"\nWhirlpool: " + (new Whirlpool()).run(arrayBuffer, ["Whirlpool"]) +
"\nBLAKE2b-128: " + (new BLAKE2b).run(arrayBuffer, ["128", "Hex", {string: "", option: "UTF8"}]) +
"\nBLAKE2b-160: " + (new BLAKE2b).run(arrayBuffer, ["160", "Hex", {string: "", option: "UTF8"}]) +
"\nBLAKE2b-256: " + (new BLAKE2b).run(arrayBuffer, ["256", "Hex", {string: "", option: "UTF8"}]) +
"\nBLAKE2b-384: " + (new BLAKE2b).run(arrayBuffer, ["384", "Hex", {string: "", option: "UTF8"}]) +
"\nBLAKE2b-512: " + (new BLAKE2b).run(arrayBuffer, ["512", "Hex", {string: "", option: "UTF8"}]) +
"\nBLAKE2s-128: " + (new BLAKE2s).run(arrayBuffer, ["128", "Hex", {string: "", option: "UTF8"}]) +
"\nBLAKE2s-160: " + (new BLAKE2s).run(arrayBuffer, ["160", "Hex", {string: "", option: "UTF8"}]) +
"\nBLAKE2s-256: " + (new BLAKE2s).run(arrayBuffer, ["256", "Hex", {string: "", option: "UTF8"}]) +
"\nSSDEEP: " + (new SSDEEP()).run(str) +
"\nCTPH: " + (new CTPH()).run(str) +
"\n\nChecksums:" +

View File

@@ -0,0 +1,41 @@
/**
* @author tlwr [toby@toby.codes]
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
/**
* HTML To Text operation
*/
class HTMLToText extends Operation {
/**
* HTMLToText constructor
*/
constructor() {
super();
this.name = "HTML To Text";
this.module = "Default";
this.description = "Converts an HTML output from an operation to a readable string instead of being rendered in the DOM.";
this.infoURL = "";
this.inputType = "html";
this.outputType = "string";
this.args = [];
}
/**
* @param {html} input
* @param {Object[]} args
* @returns {string}
*/
run(input, args) {
return input;
}
}
export default HTMLToText;

View File

@@ -0,0 +1,266 @@
/**
* @author tlwr [toby@toby.codes]
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import * as d3temp from "d3";
import * as nodomtemp from "nodom";
import { getScatterValues, RECORD_DELIMITER_OPTIONS, COLOURS, FIELD_DELIMITER_OPTIONS } from "../lib/Charts";
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Utils from "../Utils";
const d3 = d3temp.default ? d3temp.default : d3temp;
const nodom = nodomtemp.default ? nodomtemp.default: nodomtemp;
/**
* Heatmap chart operation
*/
class HeatmapChart extends Operation {
/**
* HeatmapChart constructor
*/
constructor() {
super();
this.name = "Heatmap chart";
this.module = "Charts";
this.description = "A heatmap is a graphical representation of data where the individual values contained in a matrix are represented as colors.";
this.infoURL = "https://wikipedia.org/wiki/Heat_map";
this.inputType = "string";
this.outputType = "html";
this.args = [
{
name: "Record delimiter",
type: "option",
value: RECORD_DELIMITER_OPTIONS,
},
{
name: "Field delimiter",
type: "option",
value: FIELD_DELIMITER_OPTIONS,
},
{
name: "Number of vertical bins",
type: "number",
value: 25,
},
{
name: "Number of horizontal bins",
type: "number",
value: 25,
},
{
name: "Use column headers as labels",
type: "boolean",
value: true,
},
{
name: "X label",
type: "string",
value: "",
},
{
name: "Y label",
type: "string",
value: "",
},
{
name: "Draw bin edges",
type: "boolean",
value: false,
},
{
name: "Min colour value",
type: "string",
value: COLOURS.min,
},
{
name: "Max colour value",
type: "string",
value: COLOURS.max,
},
];
}
/**
* Heatmap chart operation.
*
* @param {string} input
* @param {Object[]} args
* @returns {html}
*/
run(input, args) {
const recordDelimiter = Utils.charRep(args[0]),
fieldDelimiter = Utils.charRep(args[1]),
vBins = args[2],
hBins = args[3],
columnHeadingsAreIncluded = args[4],
drawEdges = args[7],
minColour = args[8],
maxColour = args[9],
dimension = 500;
if (vBins <= 0) throw new OperationError("Number of vertical bins must be greater than 0");
if (hBins <= 0) throw new OperationError("Number of horizontal bins must be greater than 0");
let xLabel = args[5],
yLabel = args[6];
const { headings, values } = getScatterValues(
input,
recordDelimiter,
fieldDelimiter,
columnHeadingsAreIncluded
);
if (headings) {
xLabel = headings.x;
yLabel = headings.y;
}
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${dimension} ${dimension}`);
const margin = {
top: 10,
right: 0,
bottom: 40,
left: 30,
},
width = dimension - margin.left - margin.right,
height = dimension - margin.top - margin.bottom,
binWidth = width / hBins,
binHeight = height / vBins,
marginedSpace = svg.append("g")
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
const bins = this.getHeatmapPacking(values, vBins, hBins),
maxCount = Math.max(...bins.map(row => {
const lengths = row.map(cell => cell.length);
return Math.max(...lengths);
}));
const xExtent = d3.extent(values, d => d[0]),
yExtent = d3.extent(values, d => d[1]);
const xAxis = d3.scaleLinear()
.domain(xExtent)
.range([0, width]);
const yAxis = d3.scaleLinear()
.domain(yExtent)
.range([height, 0]);
const colour = d3.scaleSequential(d3.interpolateLab(minColour, maxColour))
.domain([0, maxCount]);
marginedSpace.append("clipPath")
.attr("id", "clip")
.append("rect")
.attr("width", width)
.attr("height", height);
marginedSpace.append("g")
.attr("class", "bins")
.attr("clip-path", "url(#clip)")
.selectAll("g")
.data(bins)
.enter()
.append("g")
.selectAll("rect")
.data(d => d)
.enter()
.append("rect")
.attr("x", (d) => binWidth * d.x)
.attr("y", (d) => (height - binHeight * (d.y + 1)))
.attr("width", binWidth)
.attr("height", binHeight)
.attr("fill", (d) => colour(d.length))
.attr("stroke", drawEdges ? "rgba(0, 0, 0, 0.5)" : "none")
.attr("stroke-width", drawEdges ? "0.5" : "none")
.append("title")
.text(d => {
const count = d.length,
perc = 100.0 * d.length / values.length,
tooltip = `Count: ${count}\n
Percentage: ${perc.toFixed(2)}%\n
`.replace(/\s{2,}/g, "\n");
return tooltip;
});
marginedSpace.append("g")
.attr("class", "axis axis--y")
.call(d3.axisLeft(yAxis).tickSizeOuter(-width));
svg.append("text")
.attr("transform", "rotate(-90)")
.attr("y", -margin.left)
.attr("x", -(height / 2))
.attr("dy", "1em")
.style("text-anchor", "middle")
.text(yLabel);
marginedSpace.append("g")
.attr("class", "axis axis--x")
.attr("transform", "translate(0," + height + ")")
.call(d3.axisBottom(xAxis).tickSizeOuter(-height));
svg.append("text")
.attr("x", width / 2)
.attr("y", dimension)
.style("text-anchor", "middle")
.text(xLabel);
return svg._groups[0][0].outerHTML;
}
/**
* Packs a list of x, y coordinates into a number of bins for use in a heatmap.
*
* @param {Object[]} values - a list of [x, y] points
* @param {number} vBins - the number of vertical bins
* @param {number} hBins - the number of horizontal bins
* @returns {Object[]} a list of bins (each bin is an Array with x and y coordinates), filled with the points
*/
getHeatmapPacking(values, vBins, hBins) {
const xBounds = d3.extent(values, d => d[0]),
yBounds = d3.extent(values, d => d[1]),
bins = [];
if (xBounds[0] === xBounds[1]) throw new OperationError("Cannot pack points. There is no difference between the minimum and maximum X coordinate.");
if (yBounds[0] === yBounds[1]) throw new OperationError("Cannot pack points. There is no difference between the minimum and maximum Y coordinate.");
for (let y = 0; y < vBins; y++) {
bins.push([]);
for (let x = 0; x < hBins; x++) {
const item = [];
item.y = y;
item.x = x;
bins[y].push(item);
} // x
} // y
const epsilon = 0.000000001; // This is to clamp values that are exactly the maximum;
values.forEach(v => {
const fractionOfY = (v[1] - yBounds[0]) / ((yBounds[1] + epsilon) - yBounds[0]),
fractionOfX = (v[0] - xBounds[0]) / ((xBounds[1] + epsilon) - xBounds[0]),
y = Math.floor(vBins * fractionOfY),
x = Math.floor(hBins * fractionOfX);
bins[y][x].push({x: v[0], y: v[1]});
});
return bins;
}
}
export default HeatmapChart;
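For illustration, the bin-index arithmetic used in getHeatmapPacking above, applied to a single point; the epsilon keeps a value equal to the maximum inside the last bin rather than one past it:

// 10 horizontal bins over x in [0, 100]: the point x = 100 must land in bin 9, not bin 10
const hBins = 10, xMin = 0, xMax = 100, epsilon = 0.000000001;
const x = 100;
const fractionOfX = (x - xMin) / ((xMax + epsilon) - xMin);
console.log(Math.floor(hBins * fractionOfX)); // 9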

View File

@@ -0,0 +1,296 @@
/**
* @author tlwr [toby@toby.codes]
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import * as d3temp from "d3";
import * as d3hexbintemp from "d3-hexbin";
import * as nodomtemp from "nodom";
import { getScatterValues, RECORD_DELIMITER_OPTIONS, COLOURS, FIELD_DELIMITER_OPTIONS } from "../lib/Charts";
import Operation from "../Operation";
import Utils from "../Utils";
const d3 = d3temp.default ? d3temp.default : d3temp;
const d3hexbin = d3hexbintemp.default ? d3hexbintemp.default : d3hexbintemp;
const nodom = nodomtemp.default ? nodomtemp.default: nodomtemp;
/**
* Hex Density chart operation
*/
class HexDensityChart extends Operation {
/**
* HexDensityChart constructor
*/
constructor() {
super();
this.name = "Hex Density chart";
this.module = "Charts";
this.description = "Hex density charts are used in a similar way to scatter charts, however rather than rendering tens of thousands of points, it groups the points into a few hundred hexagons to show the distribution.";
this.inputType = "string";
this.outputType = "html";
this.args = [
{
name: "Record delimiter",
type: "option",
value: RECORD_DELIMITER_OPTIONS,
},
{
name: "Field delimiter",
type: "option",
value: FIELD_DELIMITER_OPTIONS,
},
{
name: "Pack radius",
type: "number",
value: 25,
},
{
name: "Draw radius",
type: "number",
value: 15,
},
{
name: "Use column headers as labels",
type: "boolean",
value: true,
},
{
name: "X label",
type: "string",
value: "",
},
{
name: "Y label",
type: "string",
value: "",
},
{
name: "Draw hexagon edges",
type: "boolean",
value: false,
},
{
name: "Min colour value",
type: "string",
value: COLOURS.min,
},
{
name: "Max colour value",
type: "string",
value: COLOURS.max,
},
{
name: "Draw empty hexagons within data boundaries",
type: "boolean",
value: false,
}
];
}
/**
* Hex Bin chart operation.
*
* @param {string} input
* @param {Object[]} args
* @returns {html}
*/
run(input, args) {
const recordDelimiter = Utils.charRep(args[0]),
fieldDelimiter = Utils.charRep(args[1]),
packRadius = args[2],
drawRadius = args[3],
columnHeadingsAreIncluded = args[4],
drawEdges = args[7],
minColour = args[8],
maxColour = args[9],
drawEmptyHexagons = args[10],
dimension = 500;
let xLabel = args[5],
yLabel = args[6];
const { headings, values } = getScatterValues(
input,
recordDelimiter,
fieldDelimiter,
columnHeadingsAreIncluded
);
if (headings) {
xLabel = headings.x;
yLabel = headings.y;
}
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${dimension} ${dimension}`);
const margin = {
top: 10,
right: 0,
bottom: 40,
left: 30,
},
width = dimension - margin.left - margin.right,
height = dimension - margin.top - margin.bottom,
marginedSpace = svg.append("g")
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
const hexbin = d3hexbin.hexbin()
.radius(packRadius)
.extent([[0, 0], [width, height]]);
const hexPoints = hexbin(values),
maxCount = Math.max(...hexPoints.map(b => b.length));
const xExtent = d3.extent(hexPoints, d => d.x),
yExtent = d3.extent(hexPoints, d => d.y);
xExtent[0] -= 2 * packRadius;
xExtent[1] += 3 * packRadius;
yExtent[0] -= 2 * packRadius;
yExtent[1] += 2 * packRadius;
const xAxis = d3.scaleLinear()
.domain(xExtent)
.range([0, width]);
const yAxis = d3.scaleLinear()
.domain(yExtent)
.range([height, 0]);
const colour = d3.scaleSequential(d3.interpolateLab(minColour, maxColour))
.domain([0, maxCount]);
marginedSpace.append("clipPath")
.attr("id", "clip")
.append("rect")
.attr("width", width)
.attr("height", height);
if (drawEmptyHexagons) {
marginedSpace.append("g")
.attr("class", "empty-hexagon")
.selectAll("path")
.data(this.getEmptyHexagons(hexPoints, packRadius))
.enter()
.append("path")
.attr("d", d => {
return `M${xAxis(d.x)},${yAxis(d.y)} ${hexbin.hexagon(drawRadius)}`;
})
.attr("fill", (d) => colour(0))
.attr("stroke", drawEdges ? "black" : "none")
.attr("stroke-width", drawEdges ? "0.5" : "none")
.append("title")
.text(d => {
const count = 0,
perc = 0,
tooltip = `Count: ${count}\n
Percentage: ${perc.toFixed(2)}%\n
Center: ${d.x.toFixed(2)}, ${d.y.toFixed(2)}\n
`.replace(/\s{2,}/g, "\n");
return tooltip;
});
}
marginedSpace.append("g")
.attr("class", "hexagon")
.attr("clip-path", "url(#clip)")
.selectAll("path")
.data(hexPoints)
.enter()
.append("path")
.attr("d", d => {
return `M${xAxis(d.x)},${yAxis(d.y)} ${hexbin.hexagon(drawRadius)}`;
})
.attr("fill", (d) => colour(d.length))
.attr("stroke", drawEdges ? "black" : "none")
.attr("stroke-width", drawEdges ? "0.5" : "none")
.append("title")
.text(d => {
const count = d.length,
perc = 100.0 * d.length / values.length,
CX = d.x,
CY = d.y,
xMin = Math.min(...d.map(d => d[0])),
xMax = Math.max(...d.map(d => d[0])),
yMin = Math.min(...d.map(d => d[1])),
yMax = Math.max(...d.map(d => d[1])),
tooltip = `Count: ${count}\n
Percentage: ${perc.toFixed(2)}%\n
Center: ${CX.toFixed(2)}, ${CY.toFixed(2)}\n
Min X: ${xMin.toFixed(2)}\n
Max X: ${xMax.toFixed(2)}\n
Min Y: ${yMin.toFixed(2)}\n
Max Y: ${yMax.toFixed(2)}
`.replace(/\s{2,}/g, "\n");
return tooltip;
});
marginedSpace.append("g")
.attr("class", "axis axis--y")
.call(d3.axisLeft(yAxis).tickSizeOuter(-width));
svg.append("text")
.attr("transform", "rotate(-90)")
.attr("y", -margin.left)
.attr("x", -(height / 2))
.attr("dy", "1em")
.style("text-anchor", "middle")
.text(yLabel);
marginedSpace.append("g")
.attr("class", "axis axis--x")
.attr("transform", "translate(0," + height + ")")
.call(d3.axisBottom(xAxis).tickSizeOuter(-height));
svg.append("text")
.attr("x", width / 2)
.attr("y", dimension)
.style("text-anchor", "middle")
.text(xLabel);
return svg._groups[0][0].outerHTML;
}
/**
* Calculates the centres of the empty hexagons needed to fill the bounding box of the plotted hexagons.
*
* @param {Object[]} centres
* @param {number} radius
* @returns {Object[]}
*/
getEmptyHexagons(centres, radius) {
const emptyCentres = [],
boundingRect = [d3.extent(centres, d => d.x), d3.extent(centres, d => d.y)],
hexagonCenterToEdge = Math.cos(2 * Math.PI / 12) * radius,
hexagonEdgeLength = Math.sin(2 * Math.PI / 12) * radius;
let indent = false;
for (let y = boundingRect[1][0]; y <= boundingRect[1][1] + radius; y += hexagonEdgeLength + radius) {
for (let x = boundingRect[0][0]; x <= boundingRect[0][1] + radius; x += 2 * hexagonCenterToEdge) {
let cx = x;
const cy = y;
if (indent && x >= boundingRect[0][1]) break;
if (indent) cx += hexagonCenterToEdge;
emptyCentres.push({x: cx, y: cy});
}
indent = !indent;
}
return emptyCentres;
}
}
export default HexDensityChart;
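A short check of the hexagon geometry assumed by getEmptyHexagons above, for pointy-top hexagons of circumradius r as produced by d3-hexbin (values shown are approximate):

const radius = 15;
const centerToEdge = Math.cos(2 * Math.PI / 12) * radius; // apothem, about 0.866 * r
const halfEdge = Math.sin(2 * Math.PI / 12) * radius;     // half a side, 0.5 * r
console.log(2 * centerToEdge);  // horizontal spacing between centres in a row (~25.98)
console.log(halfEdge + radius); // vertical spacing between rows (1.5 * r = 22.5)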

View File

@@ -0,0 +1,107 @@
/**
* @author George O [georgeomnet+cyberchef@gmail.com]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import Utils from "../Utils";
/**
* Index of Coincidence operation
*/
class IndexOfCoincidence extends Operation {
/**
* IndexOfCoincidence constructor
*/
constructor() {
super();
this.name = "Index of Coincidence";
this.module = "Default";
this.description = "Index of Coincidence (IC) is the probability of two randomly selected characters being the same. This can be used to determine whether text is readable or random, with English text having an IC of around 0.066. IC can therefore be a sound method to automate frequency analysis.";
this.infoURL = "https://wikipedia.org/wiki/Index_of_coincidence";
this.inputType = "string";
this.outputType = "number";
this.presentType = "html";
this.args = [];
}
/**
* @param {string} input
* @param {Object[]} args
* @returns {number}
*/
run(input, args) {
const text = input.toLowerCase().replace(/[^a-z]/g, ""),
frequencies = new Array(26).fill(0),
alphabet = Utils.expandAlphRange("a-z");
let coincidence = 0.00,
density = 0.00,
result = 0.00,
i;
for (i=0; i < alphabet.length; i++) {
frequencies[i] = text.count(alphabet[i]);
}
for (i=0; i < frequencies.length; i++) {
coincidence += frequencies[i] * (frequencies[i] - 1);
}
density = frequencies.sum();
// Ensure that we don't divide by 0
if (density < 2) density = 2;
result = coincidence / (density * (density - 1));
return result;
}
/**
* Displays the IC as a scale bar for web apps.
*
* @param {number} ic
* @returns {html}
*/
present(ic) {
return `Index of Coincidence: ${ic}
Normalized: ${ic * 26}
<br><canvas id='chart-area'></canvas><br>
- 0 represents complete randomness (all characters are unique), whereas 1 represents no randomness (all characters are identical).
- English text generally has an IC of between 0.066 and 0.078.
- Uniformly 'random' text, where every letter is equally likely, tends towards an IC of 1/26 (roughly 0.038).
The graph shows the IC of the input data. A low IC generally means that the text is random, compressed or encrypted.
<script type='application/javascript'>
var canvas = document.getElementById("chart-area"),
parentRect = canvas.parentNode.getBoundingClientRect(),
ic = ${ic};
canvas.width = parentRect.width * 0.95;
canvas.height = parentRect.height * 0.25;
ic = ic > 0.25 ? 0.25 : ic;
CanvasComponents.drawScaleBar(canvas, ic, 0.25, [
{
label: "English text",
min: 0.05,
max: 0.08
},
{
label: "> 0.25",
min: 0.24,
max: 0.25
}
]);
</script>
`;
}
}
export default IndexOfCoincidence;
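A worked example of the IC formula used in run() above, IC = sum of f(f - 1) over all letters, divided by N(N - 1), on a toy input (illustrative only):

// "aabb": f(a) = 2, f(b) = 2, N = 4  =>  IC = (2*1 + 2*1) / (4*3) = 0.333...
const text = "aabb";
const freqs = {};
for (const c of text) freqs[c] = (freqs[c] || 0) + 1;
const n = text.length;
const ic = Object.values(freqs).reduce((sum, f) => sum + f * (f - 1), 0) / (n * (n - 1));
console.log(ic); // 0.3333333333333333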

View File

@@ -51,6 +51,10 @@ class JSONToCSV extends Operation {
this.rowDelim = rowDelim;
const self = this;
if (!(input instanceof Array)) {
input = [input];
}
try {
// If the JSON is an array of arrays, this is easy
if (input[0] instanceof Array) {
@@ -89,6 +93,8 @@ class JSONToCSV extends Operation {
* @returns {string}
*/
escapeCellContents(data) {
if (typeof data === "number") data = data.toString();
// Double quotes should be doubled up
data = data.replace(/"/g, '""');

View File

@@ -21,7 +21,7 @@ class JavaScriptParser extends Operation {
this.name = "JavaScript Parser";
this.module = "Code";
this.description = "Returns an Abstract Syntax Tree for valid JavaScript code.";
this.infoURL = "https://en.wikipedia.org/wiki/Abstract_syntax_tree";
this.infoURL = "https://wikipedia.org/wiki/Abstract_syntax_tree";
this.inputType = "string";
this.outputType = "string";
this.args = [

View File

@@ -21,7 +21,7 @@ class PEMToHex extends Operation {
this.name = "PEM to Hex";
this.module = "PublicKey";
this.description = "Converts PEM (Privacy Enhanced Mail) format to a hexadecimal DER (Distinguished Encoding Rules) string.";
this.infoURL = "https://en.wikipedia.org/wiki/X.690#DER_encoding";
this.infoURL = "https://wikipedia.org/wiki/X.690#DER_encoding";
this.inputType = "string";
this.outputType = "string";
this.args = [];

View File

@@ -0,0 +1,46 @@
/**
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Protobuf from "../lib/Protobuf";
/**
* Protobuf Decode operation
*/
class ProtobufDecode extends Operation {
/**
* ProtobufDecode constructor
*/
constructor() {
super();
this.name = "Protobuf Decode";
this.module = "Default";
this.description = "Decodes any Protobuf encoded data to a JSON representation of the data using the field number as the field key.";
this.infoURL = "https://wikipedia.org/wiki/Protocol_Buffers";
this.inputType = "byteArray";
this.outputType = "JSON";
this.args = [];
}
/**
* @param {byteArray} input
* @param {Object[]} args
* @returns {JSON}
*/
run(input, args) {
try {
return Protobuf.decode(input);
} catch (err) {
throw new OperationError(err);
}
}
}
export default ProtobufDecode;

View File

@@ -230,6 +230,7 @@ function regexHighlight (input, regex, displayTotal) {
title = "",
hl = 1,
total = 0;
const captureGroups = [];
output = input.replace(regex, (match, ...args) => {
args.pop(); // Throw away full string
@@ -247,9 +248,15 @@ function regexHighlight (input, regex, displayTotal) {
// Switch highlight
hl = hl === 1 ? 2 : 1;
total++;
// Store highlighted match and replace with a placeholder
captureGroups.push(`<span class='hl${hl}' title='${title}'>${Utils.escapeHtml(match)}</span>`);
return `[cc_capture_group_${total++}]`;
});
return `<span class='hl${hl}' title='${title}'>${Utils.escapeHtml(match)}</span>`;
// Safely escape all remaining text, then replace placeholders
output = Utils.escapeHtml(output);
output = output.replace(/\[cc_capture_group_(\d+)\]/g, (_, i) => {
return captureGroups[i];
});
if (displayTotal)
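A minimal standalone sketch of the placeholder technique used above (simplified escaping and names, illustrative only): matches are swapped for plain-text tokens, the remaining text is HTML-escaped in one pass, then the tokens are expanded back into the pre-built, already-escaped spans.

const escape = s => s.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
const groups = [];
let n = 0;
let out = "match <img> match".replace(/match/g, m => {
    groups.push(`<span class='hl1'>${escape(m)}</span>`);
    return `[cc_capture_group_${n++}]`;
});
out = escape(out).replace(/\[cc_capture_group_(\d+)\]/g, (_, i) => groups[i]);
// out === "<span class='hl1'>match</span> &lt;img&gt; <span class='hl1'>match</span>"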

View File

@@ -0,0 +1,199 @@
/**
* @author tlwr [toby@toby.codes]
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import * as d3temp from "d3";
import * as nodomtemp from "nodom";
import { getScatterValues, getScatterValuesWithColour, RECORD_DELIMITER_OPTIONS, COLOURS, FIELD_DELIMITER_OPTIONS } from "../lib/Charts";
import Operation from "../Operation";
import Utils from "../Utils";
const d3 = d3temp.default ? d3temp.default : d3temp;
const nodom = nodomtemp.default ? nodomtemp.default: nodomtemp;
/**
* Scatter chart operation
*/
class ScatterChart extends Operation {
/**
* ScatterChart constructor
*/
constructor() {
super();
this.name = "Scatter chart";
this.module = "Charts";
this.description = "Plots two-variable data as single points on a graph.";
this.infoURL = "https://wikipedia.org/wiki/Scatter_plot";
this.inputType = "string";
this.outputType = "html";
this.args = [
{
name: "Record delimiter",
type: "option",
value: RECORD_DELIMITER_OPTIONS,
},
{
name: "Field delimiter",
type: "option",
value: FIELD_DELIMITER_OPTIONS,
},
{
name: "Use column headers as labels",
type: "boolean",
value: true,
},
{
name: "X label",
type: "string",
value: "",
},
{
name: "Y label",
type: "string",
value: "",
},
{
name: "Colour",
type: "string",
value: COLOURS.max,
},
{
name: "Point radius",
type: "number",
value: 10,
},
{
name: "Use colour from third column",
type: "boolean",
value: false,
}
];
}
/**
* Scatter chart operation.
*
* @param {string} input
* @param {Object[]} args
* @returns {html}
*/
run(input, args) {
const recordDelimiter = Utils.charRep(args[0]),
fieldDelimiter = Utils.charRep(args[1]),
columnHeadingsAreIncluded = args[2],
fillColour = args[5],
radius = args[6],
colourInInput = args[7],
dimension = 500;
let xLabel = args[3],
yLabel = args[4];
const dataFunction = colourInInput ? getScatterValuesWithColour : getScatterValues;
const { headings, values } = dataFunction(
input,
recordDelimiter,
fieldDelimiter,
columnHeadingsAreIncluded
);
if (headings) {
xLabel = headings.x;
yLabel = headings.y;
}
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${dimension} ${dimension}`);
const margin = {
top: 10,
right: 0,
bottom: 40,
left: 30,
},
width = dimension - margin.left - margin.right,
height = dimension - margin.top - margin.bottom,
marginedSpace = svg.append("g")
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
const xExtent = d3.extent(values, d => d[0]),
xDelta = xExtent[1] - xExtent[0],
yExtent = d3.extent(values, d => d[1]),
yDelta = yExtent[1] - yExtent[0],
xAxis = d3.scaleLinear()
.domain([xExtent[0] - (0.1 * xDelta), xExtent[1] + (0.1 * xDelta)])
.range([0, width]),
yAxis = d3.scaleLinear()
.domain([yExtent[0] - (0.1 * yDelta), yExtent[1] + (0.1 * yDelta)])
.range([height, 0]);
marginedSpace.append("clipPath")
.attr("id", "clip")
.append("rect")
.attr("width", width)
.attr("height", height);
marginedSpace.append("g")
.attr("class", "points")
.attr("clip-path", "url(#clip)")
.selectAll("circle")
.data(values)
.enter()
.append("circle")
.attr("cx", (d) => xAxis(d[0]))
.attr("cy", (d) => yAxis(d[1]))
.attr("r", d => radius)
.attr("fill", d => {
return colourInInput ? d[2] : fillColour;
})
.attr("stroke", "rgba(0, 0, 0, 0.5)")
.attr("stroke-width", "0.5")
.append("title")
.text(d => {
const x = d[0],
y = d[1],
tooltip = `X: ${x}\n
Y: ${y}\n
`.replace(/\s{2,}/g, "\n");
return tooltip;
});
marginedSpace.append("g")
.attr("class", "axis axis--y")
.call(d3.axisLeft(yAxis).tickSizeOuter(-width));
svg.append("text")
.attr("transform", "rotate(-90)")
.attr("y", -margin.left)
.attr("x", -(height / 2))
.attr("dy", "1em")
.style("text-anchor", "middle")
.text(yLabel);
marginedSpace.append("g")
.attr("class", "axis axis--x")
.attr("transform", "translate(0," + height + ")")
.call(d3.axisBottom(xAxis).tickSizeOuter(-height));
svg.append("text")
.attr("x", width / 2)
.attr("y", dimension)
.style("text-anchor", "middle")
.text(xLabel);
return svg._groups[0][0].outerHTML;
}
}
export default ScatterChart;

View File

@@ -0,0 +1,227 @@
/**
* @author tlwr [toby@toby.codes]
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import * as d3temp from "d3";
import * as nodomtemp from "nodom";
import { getSeriesValues, RECORD_DELIMITER_OPTIONS, FIELD_DELIMITER_OPTIONS } from "../lib/Charts";
import Operation from "../Operation";
import Utils from "../Utils";
const d3 = d3temp.default ? d3temp.default : d3temp;
const nodom = nodomtemp.default ? nodomtemp.default: nodomtemp;
/**
* Series chart operation
*/
class SeriesChart extends Operation {
/**
* SeriesChart constructor
*/
constructor() {
super();
this.name = "Series chart";
this.module = "Charts";
this.description = "A time series graph is a line graph of repeated measurements taken over regular time intervals.";
this.inputType = "string";
this.outputType = "html";
this.args = [
{
name: "Record delimiter",
type: "option",
value: RECORD_DELIMITER_OPTIONS,
},
{
name: "Field delimiter",
type: "option",
value: FIELD_DELIMITER_OPTIONS,
},
{
name: "X label",
type: "string",
value: "",
},
{
name: "Point radius",
type: "number",
value: 1,
},
{
name: "Series colours",
type: "string",
value: "mediumseagreen, dodgerblue, tomato",
},
];
}
/**
* Series chart operation.
*
* @param {string} input
* @param {Object[]} args
* @returns {html}
*/
run(input, args) {
const recordDelimiter = Utils.charRep(args[0]),
fieldDelimiter = Utils.charRep(args[1]),
xLabel = args[2],
pipRadius = args[3],
seriesColours = args[4].split(","),
svgWidth = 500,
interSeriesPadding = 20,
xAxisHeight = 50,
seriesLabelWidth = 50,
seriesHeight = 100,
seriesWidth = svgWidth - seriesLabelWidth - interSeriesPadding;
const { xValues, series } = getSeriesValues(input, recordDelimiter, fieldDelimiter),
allSeriesHeight = Object.keys(series).length * (interSeriesPadding + seriesHeight),
svgHeight = allSeriesHeight + xAxisHeight + interSeriesPadding;
const document = new nodom.Document();
let svg = document.createElement("svg");
svg = d3.select(svg)
.attr("width", "100%")
.attr("height", "100%")
.attr("viewBox", `0 0 ${svgWidth} ${svgHeight}`);
const xAxis = d3.scalePoint()
.domain(xValues)
.range([0, seriesWidth]);
svg.append("g")
.attr("class", "axis axis--x")
.attr("transform", `translate(${seriesLabelWidth}, ${xAxisHeight})`)
.call(
d3.axisTop(xAxis).tickValues(xValues.filter((x, i) => {
return [0, Math.round(xValues.length / 2), xValues.length -1].indexOf(i) >= 0;
}))
);
svg.append("text")
.attr("x", svgWidth / 2)
.attr("y", xAxisHeight / 2)
.style("text-anchor", "middle")
.text(xLabel);
const tooltipText = {},
tooltipAreaWidth = seriesWidth / xValues.length;
xValues.forEach(x => {
const tooltip = [];
series.forEach(serie => {
const y = serie.data[x];
if (typeof y === "undefined") return;
tooltip.push(`${serie.name}: ${y}`);
});
tooltipText[x] = tooltip.join("\n");
});
const chartArea = svg.append("g")
.attr("transform", `translate(${seriesLabelWidth}, ${xAxisHeight})`);
chartArea
.append("g")
.selectAll("rect")
.data(xValues)
.enter()
.append("rect")
.attr("x", x => {
return xAxis(x) - (tooltipAreaWidth / 2);
})
.attr("y", 0)
.attr("width", tooltipAreaWidth)
.attr("height", allSeriesHeight)
.attr("stroke", "none")
.attr("fill", "transparent")
.append("title")
.text(x => {
return `${x}\n
--\n
${tooltipText[x]}\n
`.replace(/\s{2,}/g, "\n");
});
const yAxesArea = svg.append("g")
.attr("transform", `translate(0, ${xAxisHeight})`);
series.forEach((serie, seriesIndex) => {
const yExtent = d3.extent(Object.values(serie.data)),
yAxis = d3.scaleLinear()
.domain(yExtent)
.range([seriesHeight, 0]);
const seriesGroup = chartArea
.append("g")
.attr("transform", `translate(0, ${seriesHeight * seriesIndex + interSeriesPadding * (seriesIndex + 1)})`);
let path = "";
xValues.forEach((x, xIndex) => {
let nextX = xValues[xIndex + 1],
y = serie.data[x],
nextY = serie.data[nextX];
if (typeof y === "undefined" || typeof nextY === "undefined") return;
x = xAxis(x); nextX = xAxis(nextX);
y = yAxis(y); nextY = yAxis(nextY);
path += `M ${x} ${y} L ${nextX} ${nextY} z `;
});
seriesGroup
.append("path")
.attr("d", path)
.attr("fill", "none")
.attr("stroke", seriesColours[seriesIndex % seriesColours.length])
.attr("stroke-width", "1");
xValues.forEach(x => {
const y = serie.data[x];
if (typeof y === "undefined") return;
seriesGroup
.append("circle")
.attr("cx", xAxis(x))
.attr("cy", yAxis(y))
.attr("r", pipRadius)
.attr("fill", seriesColours[seriesIndex % seriesColours.length])
.append("title")
.text(d => {
return `${x}\n
--\n
${tooltipText[x]}\n
`.replace(/\s{2,}/g, "\n");
});
});
yAxesArea
.append("g")
.attr("transform", `translate(${seriesLabelWidth - interSeriesPadding}, ${seriesHeight * seriesIndex + interSeriesPadding * (seriesIndex + 1)})`)
.attr("class", "axis axis--y")
.call(d3.axisLeft(yAxis).ticks(5));
yAxesArea
.append("g")
.attr("transform", `translate(0, ${seriesHeight / 2 + seriesHeight * seriesIndex + interSeriesPadding * (seriesIndex + 1)})`)
.append("text")
.style("text-anchor", "middle")
.attr("transform", "rotate(-90)")
.text(serie.name);
});
return svg._groups[0][0].outerHTML;
}
}
export default SeriesChart;

View File

@@ -79,7 +79,7 @@ class TextEncodingBruteForce extends Operation {
let table = "<table class='table table-hover table-sm table-bordered table-nonfluid'><tr><th>Encoding</th><th>Value</th></tr>";
for (const enc in encodings) {
const value = Utils.printable(encodings[enc], true);
const value = Utils.escapeHtml(Utils.printable(encodings[enc], true));
table += `<tr><td>${enc}</td><td>${value}</td></tr>`;
}

View File

@@ -20,7 +20,7 @@ class UnescapeString extends Operation {
this.name = "Unescape string";
this.module = "Default";
this.description = "Unescapes characters in a string that have been escaped. For example, <code>Don\\'t stop me now</code> becomes <code>Don't stop me now</code>.<br><br>Supports the following escape sequences:<ul><li><code>\\n</code> (Line feed/newline)</li><li><code>\\r</code> (Carriage return)</li><li><code>\\t</code> (Horizontal tab)</li><li><code>\\b</code> (Backspace)</li><li><code>\\f</code> (Form feed)</li><li><code>\\xnn</code> (Hex, where n is 0-f)</li><li><code>\\\\</code> (Backslash)</li><li><code>\\'</code> (Single quote)</li><li><code>\\&quot;</code> (Double quote)</li><li><code>\\unnnn</code> (Unicode character)</li><li><code>\\u{nnnnnn}</code> (Unicode code point)</li></ul>";
this.description = "Unescapes characters in a string that have been escaped. For example, <code>Don\\'t stop me now</code> becomes <code>Don't stop me now</code>.<br><br>Supports the following escape sequences:<ul><li><code>\\n</code> (Line feed/newline)</li><li><code>\\r</code> (Carriage return)</li><li><code>\\t</code> (Horizontal tab)</li><li><code>\\b</code> (Backspace)</li><li><code>\\f</code> (Form feed)</li><li><code>\\nnn</code> (Octal, where n is 0-7)</li><li><code>\\xnn</code> (Hex, where n is 0-f)</li><li><code>\\\\</code> (Backslash)</li><li><code>\\'</code> (Single quote)</li><li><code>\\&quot;</code> (Double quote)</li><li><code>\\unnnn</code> (Unicode character)</li><li><code>\\u{nnnnnn}</code> (Unicode code point)</li></ul>";
this.infoURL = "https://wikipedia.org/wiki/Escape_sequence";
this.inputType = "string";
this.outputType = "string";

View File

@@ -0,0 +1,46 @@
/**
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Protobuf from "../lib/Protobuf";
/**
* VarInt Decode operation
*/
class VarIntDecode extends Operation {
/**
* VarIntDecode constructor
*/
constructor() {
super();
this.name = "VarInt Decode";
this.module = "Default";
this.description = "Decodes a VarInt encoded integer. VarInt is an efficient way of encoding variable length integers and is commonly used with Protobuf.";
this.infoURL = "https://developers.google.com/protocol-buffers/docs/encoding#varints";
this.inputType = "byteArray";
this.outputType = "number";
this.args = [];
}
/**
* @param {byteArray} input
* @param {Object[]} args
* @returns {number}
*/
run(input, args) {
try {
return Protobuf.varIntDecode(input);
} catch (err) {
throw new OperationError(err);
}
}
}
export default VarIntDecode;
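A worked example of the VarInt format handled above: 7-bit groups, least significant first, with the high bit of each byte marking continuation (the decoding loop is an illustrative sketch, not the library code):

// Decode [0xAC, 0x02]: (0xAC & 0x7f) + (0x02 & 0x7f) * 128 = 44 + 256 = 300
const bytes = [0xAC, 0x02];
let value = 0, shift = 0;
for (const b of bytes) {
    value += (b & 0x7f) * Math.pow(2, shift);
    shift += 7;
}
console.log(value); // 300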

View File

@@ -0,0 +1,46 @@
/**
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Protobuf from "../lib/Protobuf";
/**
* VarInt Encode operation
*/
class VarIntEncode extends Operation {
/**
* VarIntEncode constructor
*/
constructor() {
super();
this.name = "VarInt Encode";
this.module = "Default";
this.description = "Encodes a Vn integer as a VarInt. VarInt is an efficient way of encoding variable length integers and is commonly used with Protobuf.";
this.infoURL = "https://developers.google.com/protocol-buffers/docs/encoding#varints";
this.inputType = "number";
this.outputType = "byteArray";
this.args = [];
}
/**
* @param {number} input
* @param {Object[]} args
* @returns {byteArray}
*/
run(input, args) {
try {
return Protobuf.varIntEncode(input);
} catch (err) {
throw new OperationError(err);
}
}
}
export default VarIntEncode;
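And the reverse direction for the encoder above, again as an illustrative sketch rather than the Protobuf library's own implementation:

// Encode 300: emit 7 bits at a time, setting the high bit while more bits remain
let n = 300;
const out = [];
while (n >= 0x80) {
    out.push((n & 0x7f) | 0x80);
    n = Math.floor(n / 128);
}
out.push(n);
console.log(out.map(b => "0x" + b.toString(16))); // ["0xac", "0x2"]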

View File

@@ -1,265 +0,0 @@
/** @license
========================================================================
bzip2.js - a small bzip2 decompression implementation
Copyright 2011 by antimatter15 (antimatter15@gmail.com)
Based on micro-bunzip by Rob Landley (rob@landley.net).
Copyright (c) 2011 by antimatter15 (antimatter15@gmail.com).
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH
THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
"use strict";
var bzip2 = {};
bzip2.array = function(bytes){
var bit = 0, byte = 0;
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF ];
return function(n){
var result = 0;
while(n > 0){
var left = 8 - bit;
if(n >= left){
result <<= left;
result |= (BITMASK[left] & bytes[byte++]);
bit = 0;
n -= left;
}else{
result <<= n;
result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
bit += n;
n = 0;
}
}
return result
}
}
bzip2.simple = function(bits){
var size = bzip2.header(bits);
var all = '', chunk = '';
do{
all += chunk;
chunk = bzip2.decompress(bits, size);
}while(chunk != -1);
return all;
}
bzip2.header = function(bits){
if(bits(8*3) != 4348520) throw "No magic number found";
var i = bits(8) - 48;
if(i < 1 || i > 9) throw "Not a BZIP archive";
return i;
};
//takes a function for reading the block data (starting with 0x314159265359)
//a block size (0-9) (optional, defaults to 9)
//a length at which to stop decompressing and return the output
bzip2.decompress = function(bits, size, len){
var MAX_HUFCODE_BITS = 20;
var MAX_SYMBOLS = 258;
var SYMBOL_RUNA = 0;
var SYMBOL_RUNB = 1;
var GROUP_SIZE = 50;
var bufsize = 100000 * size;
for(var h = '', i = 0; i < 6; i++) h += bits(8).toString(16);
if(h == "177245385090") return -1; //last block
if(h != "314159265359") throw "Not valid bzip data";
bits(32); //ignore CRC codes
if(bits(1)) throw "Unsupported obsolete version";
var origPtr = bits(24);
if(origPtr > bufsize) throw "Initial position larger than buffer size";
var t = bits(16);
var symToByte = new Uint8Array(256),
symTotal = 0;
for (i = 0; i < 16; i++) {
if(t & (1 << (15 - i))) {
var k = bits(16);
for(j = 0; j < 16; j++){
if(k & (1 << (15 - j))){
symToByte[symTotal++] = (16 * i) + j;
}
}
}
}
var groupCount = bits(3);
if(groupCount < 2 || groupCount > 6) throw "Error 1";
var nSelectors = bits(15);
if(nSelectors == 0) throw "Error";
var mtfSymbol = []; //TODO: possibly replace JS array with typed arrays
for(var i = 0; i < groupCount; i++) mtfSymbol[i] = i;
var selectors = new Uint8Array(32768);
for(var i = 0; i < nSelectors; i++){
for(var j = 0; bits(1); j++) if(j >= groupCount) throw "Error 2";
var uc = mtfSymbol[j];
mtfSymbol.splice(j, 1); //this is a probably inefficient MTF transform
mtfSymbol.splice(0, 0, uc);
selectors[i] = uc;
}
var symCount = symTotal + 2;
var groups = [];
for(var j = 0; j < groupCount; j++){
var length = new Uint8Array(MAX_SYMBOLS),
temp = new Uint8Array(MAX_HUFCODE_BITS+1);
t = bits(5); //lengths
for(var i = 0; i < symCount; i++){
while(true){
if (t < 1 || t > MAX_HUFCODE_BITS) throw "Error 3";
if(!bits(1)) break;
if(!bits(1)) t++;
else t--;
}
length[i] = t;
}
var minLen, maxLen;
minLen = maxLen = length[0];
for(var i = 1; i < symCount; i++){
if(length[i] > maxLen) maxLen = length[i];
else if(length[i] < minLen) minLen = length[i];
}
var hufGroup;
hufGroup = groups[j] = {};
hufGroup.permute = new Uint32Array(MAX_SYMBOLS);
hufGroup.limit = new Uint32Array(MAX_HUFCODE_BITS + 1);
hufGroup.base = new Uint32Array(MAX_HUFCODE_BITS + 1);
hufGroup.minLen = minLen;
hufGroup.maxLen = maxLen;
var base = hufGroup.base.subarray(1);
var limit = hufGroup.limit.subarray(1);
var pp = 0;
for(var i = minLen; i <= maxLen; i++)
for(var t = 0; t < symCount; t++)
if(length[t] == i) hufGroup.permute[pp++] = t;
for(i = minLen; i <= maxLen; i++) temp[i] = limit[i] = 0;
for(i = 0; i < symCount; i++) temp[length[i]]++;
pp = t = 0;
for(i = minLen; i < maxLen; i++) {
pp += temp[i];
limit[i] = pp - 1;
pp <<= 1;
base[i+1] = pp - (t += temp[i]);
}
limit[maxLen]=pp+temp[maxLen]-1;
base[minLen]=0;
}
var byteCount = new Uint32Array(256);
for(var i = 0; i < 256; i++) mtfSymbol[i] = i;
var runPos, count, symCount, selector;
runPos = count = symCount = selector = 0;
var buf = new Uint32Array(bufsize);
while(true){
if(!(symCount--)){
symCount = GROUP_SIZE - 1;
if(selector >= nSelectors) throw "Error 4";
hufGroup = groups[selectors[selector++]];
base = hufGroup.base.subarray(1);
limit = hufGroup.limit.subarray(1);
}
i = hufGroup.minLen;
j = bits(i);
while(true){
if(i > hufGroup.maxLen) throw "Error 5";
if(j <= limit[i]) break;
i++;
j = (j << 1) | bits(1);
}
j -= base[i];
if(j < 0 || j >= MAX_SYMBOLS) throw "Error 6";
var nextSym = hufGroup.permute[j];
if (nextSym == SYMBOL_RUNA || nextSym == SYMBOL_RUNB) {
if(!runPos){
runPos = 1;
t = 0;
}
if(nextSym == SYMBOL_RUNA) t += runPos;
else t += 2 * runPos;
runPos <<= 1;
continue;
}
if(runPos){
runPos = 0;
if(count + t >= bufsize) throw "Error 7";
uc = symToByte[mtfSymbol[0]];
byteCount[uc] += t;
while(t--) buf[count++] = uc;
}
if(nextSym > symTotal) break;
if(count >= bufsize) throw "Error 8";
i = nextSym -1;
uc = mtfSymbol[i];
mtfSymbol.splice(i, 1);
mtfSymbol.splice(0, 0, uc);
uc = symToByte[uc];
byteCount[uc]++;
buf[count++] = uc;
}
if(origPtr < 0 || origPtr >= count) throw "Error 9";
var j = 0;
for(var i = 0; i < 256; i++){
k = j + byteCount[i];
byteCount[i] = j;
j = k;
}
for(var i = 0; i < count; i++){
uc = buf[i] & 0xff;
buf[byteCount[uc]] |= (i << 8);
byteCount[uc]++;
}
var pos = 0, current = 0, run = 0;
if(count) {
pos = buf[origPtr];
current = (pos & 0xff);
pos >>= 8;
run = -1;
}
count = count;
var output = '';
var copies, previous, outbyte;
if(!len) len = Infinity;
while(count){
count--;
previous = current;
pos = buf[pos];
current = pos & 0xff;
pos >>= 8;
if(run++ == 3){
copies = current;
outbyte = previous;
current = -1;
}else{
copies = 1;
outbyte = current;
}
while(copies--){
output += (String.fromCharCode(outbyte));
if(!--len) return output;
}
if(current != previous) run = 0;
}
return output;
}
export default bzip2;

View File

@@ -5,7 +5,6 @@
* @copyright Crown Copyright 2017
* @license Apache-2.0
*/
import "babel-polyfill";
// Define global environment functions
global.ENVIRONMENT_IS_WORKER = function() {

View File

@@ -108,7 +108,7 @@ class App {
handleError(err, logToConsole) {
if (logToConsole) log.error(err);
const msg = err.displayStr || err.toString();
this.alert(msg, this.options.errorTimeout, !this.options.showErrors);
this.alert(Utils.escapeHtml(msg), this.options.errorTimeout, !this.options.showErrors);
}

View File

@@ -338,7 +338,7 @@ class ControlsWaiter {
const saveLink = this.generateStateUrl(true, true, null, "https://gchq.github.io/CyberChef/");
if (reportBugInfo) {
reportBugInfo.innerHTML = `* Version: ${PKG_VERSION + (typeof INLINE === "undefined" ? "" : "s")}
reportBugInfo.innerHTML = `* Version: ${PKG_VERSION}
* Compile time: ${COMPILE_TIME}
* User-Agent:
${navigator.userAgent}

View File

@@ -293,7 +293,9 @@ class HTMLIngredient {
const op = el.parentNode.parentNode;
const target = op.querySelectorAll(".arg")[this.target];
target.value = el.childNodes[el.selectedIndex].getAttribute("populate-value");
const popVal = el.childNodes[el.selectedIndex].getAttribute("populate-value");
if (popVal !== "") target.value = popVal;
const evt = new Event("change");
target.dispatchEvent(evt);

View File

@@ -124,16 +124,21 @@ class RecipeWaiter {
* @param {event} evt
*/
opSortEnd(evt) {
if (this.removeIntent) {
if (evt.item.parentNode.id === "rec-list") {
evt.item.remove();
}
if (this.removeIntent && evt.item.parentNode.id === "rec-list") {
evt.item.remove();
return;
}
// Reinitialise the popover on the original element in the ops list because for some reason it
// gets destroyed and recreated.
this.manager.ops.enableOpsListPopovers(evt.clone);
// gets destroyed and recreated. If the clone isn't in the ops list, we use the original item instead.
let enableOpsElement;
if (evt.clone.parentNode && evt.clone.parentNode.classList.contains("op-list")) {
enableOpsElement = evt.clone;
} else {
enableOpsElement = evt.item;
$(evt.item).attr("data-toggle", "popover");
}
this.manager.ops.enableOpsListPopovers(enableOpsElement);
if (evt.item.parentNode.id !== "rec-list") {
return;

View File

@@ -81,6 +81,10 @@ class SeasonalWaiter {
</div>`;
optionsBody.appendChild(optionItem);
if (!this.app.options.hasOwnProperty("clippy")) {
this.app.options.clippy = true;
}
this.manager.options.load();
}
@@ -95,7 +99,7 @@ class SeasonalWaiter {
}
if (!this.app.options.clippy) {
this.clippyTimeouts.forEach(t => clearTimeout(t));
if (this.clippyTimeouts) this.clippyTimeouts.forEach(t => clearTimeout(t));
return;
}

View File

@@ -131,13 +131,6 @@
};
window.addEventListener("error", loadingErrorHandler);
</script>
<% if (htmlWebpackPlugin.options.inline) { %>
<meta name="robots" content="noindex" />
<% } else { %>
<script type="application/ld+json">
<% print(JSON.stringify(require("../static/structuredData.json"))); %>
</script>
<% } %>
</head>
<body>
<!-- Preloader overlay -->
@@ -153,11 +146,7 @@
<div id="content-wrapper">
<div id="banner" class="row">
<div class="col" style="text-align: left; padding-left: 10px;">
<% if (htmlWebpackPlugin.options.inline) { %>
<span>Version <%= htmlWebpackPlugin.options.version %></span>
<% } else { %>
<a href="cyberchef.htm" download>Download CyberChef <i class="material-icons">file_download</i></a>
<% } %>
<a href="CyberChef_v<%= htmlWebpackPlugin.options.version %>.zip" download>Download CyberChef <i class="material-icons">file_download</i></a>
</div>
<div class="col-md-6" id="notice-wrapper">
<span id="notice">

View File

@@ -8,7 +8,6 @@
import "./stylesheets/index.js";
// Libs
import "babel-polyfill";
import "arrive";
import "snackbarjs";
import "bootstrap-material-design";

View File

@@ -86,7 +86,7 @@ div.toggle-string {
}
.operation .form-control {
padding: 20px 12px 6px 12px;
padding: 20px 12px 6px 12px !important;
border-top-left-radius: 4px;
border-top-right-radius: 4px;
background-image: none;

View File

@@ -10,7 +10,6 @@
* @copyright Crown Copyright 2017
* @license Apache-2.0
*/
import "babel-polyfill";
// Define global environment functions
global.ENVIRONMENT_IS_WORKER = function() {
@@ -33,6 +32,7 @@ import "./tests/BitwiseOp";
import "./tests/ByteRepr";
import "./tests/CartesianProduct";
import "./tests/CharEnc";
import "./tests/Charts";
import "./tests/Checksum";
import "./tests/Ciphers";
import "./tests/Code";
@@ -49,9 +49,11 @@ import "./tests/Hash";
import "./tests/HaversineDistance";
import "./tests/Hexdump";
import "./tests/Image";
import "./tests/IndexOfCoincidence";
import "./tests/Jump";
import "./tests/JSONBeautify";
import "./tests/JSONMinify";
import "./tests/JSONtoCSV";
import "./tests/JWTDecode";
import "./tests/JWTSign";
import "./tests/JWTVerify";
@@ -87,6 +89,9 @@ import "./tests/Enigma";
import "./tests/Bombe";
import "./tests/MultipleBombe";
import "./tests/Typex";
import "./tests/BLAKE2b";
import "./tests/BLAKE2s";
import "./tests/Protobuf";
// Cannot test operations that use the File type yet
//import "./tests/SplitColourChannels";

View File

@@ -0,0 +1,56 @@
/**
* BLAKE2b tests
*
* @author h345983745
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import TestRegister from "../TestRegister";
TestRegister.addTests([
{
name: "BLAKE2b: 512 - Hello World",
input: "Hello World",
expectedOutput: "4386a08a265111c9896f56456e2cb61a64239115c4784cf438e36cc851221972da3fb0115f73cd02486254001f878ab1fd126aac69844ef1c1ca152379d0a9bd",
recipeConfig: [
{ "op": "BLAKE2b",
"args": ["512", "Hex", {string: "", option: "UTF8"}] }
]
},
{
name: "BLAKE2b: 384 - Hello World",
input: "Hello World",
expectedOutput: "4d388e82ca8f866e606b6f6f0be910abd62ad6e98c0adfc27cf35acf948986d5c5b9c18b6f47261e1e679eb98edf8e2d",
recipeConfig: [
{ "op": "BLAKE2b",
"args": ["384", "Hex", {string: "", option: "UTF8"}] }
]
},
{
name: "BLAKE2b: 256 - Hello World",
input: "Hello World",
expectedOutput: "1dc01772ee0171f5f614c673e3c7fa1107a8cf727bdf5a6dadb379e93c0d1d00",
recipeConfig: [
{ "op": "BLAKE2b",
"args": ["256", "Hex", {string: "", option: "UTF8"}] }
]
},
{
name: "BLAKE2b: 160 - Hello World",
input: "Hello World",
expectedOutput: "6a8489e6fd6e51fae12ab271ec7fc8134dd5d737",
recipeConfig: [
{ "op": "BLAKE2b",
"args": ["160", "Hex", {string: "", option: "UTF8"}] }
]
},
{
name: "BLAKE2b: Key Test",
input: "message data",
expectedOutput: "3d363ff7401e02026f4a4687d4863ced",
recipeConfig: [
{ "op": "BLAKE2b",
"args": ["128", "Hex", {string: "pseudorandom key", option: "UTF8"}] }
]
}
]);
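For anyone wanting to sanity-check these vectors outside CyberChef, the unkeyed 512-bit case maps directly onto Node's built-in crypto module when the underlying OpenSSL build exposes BLAKE2 (the truncated and keyed variants do not, so only that case is shown here):

// Assumes a Node build whose OpenSSL provides the "blake2b512" digest.
const crypto = require("crypto");

const digest = crypto.createHash("blake2b512").update("Hello World").digest("hex");
console.log(digest); // expected to match the 512-bit vector above (4386a08a2651...)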

View File

@@ -0,0 +1,47 @@
/**
* BLAKE2s tests
*
* @author h345983745
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import TestRegister from "../TestRegister";
TestRegister.addTests([
{
name: "BLAKE2s: 256 - Hello World",
input: "Hello World",
expectedOutput: "7706af019148849e516f95ba630307a2018bb7bf03803eca5ed7ed2c3c013513",
recipeConfig: [
{ "op": "BLAKE2s",
"args": ["256", "Hex", {string: "", option: "UTF8"}] }
]
},
{
name: "BLAKE2s: 160 - Hello World",
input: "Hello World",
expectedOutput: "0e4fcfc2ee0097ac1d72d70b595a39e09a3c7c7e",
recipeConfig: [
{ "op": "BLAKE2s",
"args": ["160", "Hex", {string: "", option: "UTF8"}] }
]
},
{
name: "BLAKE2s: 128 - Hello World",
input: "Hello World",
expectedOutput: "9964ee6f36126626bf864363edfa96f6",
recipeConfig: [
{ "op": "BLAKE2s",
"args": ["128", "Hex", {string: "", option: "UTF8"}] }
]
},
{
name: "BLAKE2s: Key Test",
input: "Hello World",
expectedOutput: "9964ee6f36126626bf864363edfa96f6",
recipeConfig: [
{ "op": "BLAKE2s",
"args": ["128", "Hex", {string: "", option: "UTF8"}] }
]
}
]);

View File

@@ -0,0 +1,55 @@
/**
* Chart tests.
*
* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import TestRegister from "../TestRegister";
TestRegister.addTests([
{
name: "Scatter chart",
input: "100 100\n200 200\n300 300\n400 400\n500 500",
expectedMatch: /^<svg width/,
recipeConfig: [
{
"op": "Scatter chart",
"args": ["Line feed", "Space", false, "time", "stress", "black", 5, false]
}
],
},
{
name: "Hex density chart",
input: "100 100\n200 200\n300 300\n400 400\n500 500",
expectedMatch: /^<svg width/,
recipeConfig: [
{
"op": "Hex Density chart",
"args": ["Line feed", "Space", 25, 15, true, "", "", true, "white", "black", true]
}
],
},
{
name: "Series chart",
input: "100 100 100\n200 200 200\n300 300 300\n400 400 400\n500 500 500",
expectedMatch: /^<svg width/,
recipeConfig: [
{
"op": "Series chart",
"args": ["Line feed", "Space", "", 1, "mediumseagreen, dodgerblue, tomato"]
}
],
},
{
name: "Heatmap chart",
input: "100 100\n200 200\n300 300\n400 400\n500 500",
expectedMatch: /^<svg width/,
recipeConfig: [
{
"op": "Heatmap chart",
"args": ["Line feed", "Space", 25, 25, true, "", "", false, "white", "black"]
}
],
},
]);

View File

@@ -209,9 +209,9 @@ Tag: 16a3e732a605cc9ca29108f742ca0743`,
{
name: "AES Encrypt: AES-128-GCM, Binary",
input: "7a0e643132750e96d805d11e9e48e281fa39a41039286423cc1c045e5442b40bf1c3f2822bded3f9c8ef11cb25da64dda9c7ab87c246bd305385150c98f31465c2a6180fe81d31ea289b916504d5a12e1de26cb10adba84a0cb0c86f94bc14bc554f3018",
expectedOutput: `fa17fcbf5e8763322c1b0c8562e1512ed9d702ef70c1643572b9de3e34ae6b535e6c1b992432aa6d06fb6f80c861262aef66e7c26035afe77bd3861261e4e092b523f058f8ebef2143db21bc16d02f7a011efb07419300cb41c3b884d1d8d6a766b8963c
expectedOutput: `5a29debb5c5f38cdf8aee421bd94dbbf3399947faddf205f88b3ad8ecb0c51214ec0e28bf78942dfa212d7eb15259bbdcac677b4c05f473eeb9331d74f31d441d97d56eb5c73b586342d72128ca528813543dc0fc7eddb7477172cc9194c18b2e1383e4e
Tag: fa6bbb34c8cde65a3d7b93fb094fc84f`,
Tag: 70fad2ca19412c20f40fd06918736e56`,
recipeConfig: [
{
"op": "AES Encrypt",
@@ -301,9 +301,9 @@ Tag: fa6bbb34c8cde65a3d7b93fb094fc84f`,
{
name: "AES Encrypt: AES-192-GCM, Binary",
input: "7a0e643132750e96d805d11e9e48e281fa39a41039286423cc1c045e5442b40bf1c3f2822bded3f9c8ef11cb25da64dda9c7ab87c246bd305385150c98f31465c2a6180fe81d31ea289b916504d5a12e1de26cb10adba84a0cb0c86f94bc14bc554f3018",
expectedOutput: `ed22946f96964d300b45f5ce2d9601ba87682da1a603c90e6d4f7738729b0602f613ee392c9bfc7792594474f1213fb99185851f02ece4df0e93995e49f97aa4d0a337d7a80d83e4219dae5a3d36658f8659cdd5ed7c32707f98656fab7fb43f7a61e37c
expectedOutput: `318b479d919d506f0cd904f2676fab263a7921b6d7e0514f36e03ae2333b77fa66ef5600babcb2ee9718aeb71fc357412343c1f2cb351d8715bb0aedae4a6468124f9c4aaf6a721b306beddbe63a978bec8baeeba4b663be33ee5bc982746bd4aed1c38b
Tag: be17cb31edb77f648b9d1032b235b33d`,
Tag: 86db597d5302595223cadbd990f1309b`,
recipeConfig: [
{
"op": "AES Encrypt",
@@ -393,9 +393,9 @@ Tag: be17cb31edb77f648b9d1032b235b33d`,
{
name: "AES Encrypt: AES-256-GCM, Binary",
input: "7a0e643132750e96d805d11e9e48e281fa39a41039286423cc1c045e5442b40bf1c3f2822bded3f9c8ef11cb25da64dda9c7ab87c246bd305385150c98f31465c2a6180fe81d31ea289b916504d5a12e1de26cb10adba84a0cb0c86f94bc14bc554f3018",
expectedOutput: `e3f1b236eaf3b9df69df8133a1b417fa42b242d8ad49e4d2f3469aca7e2a41737e4f2c8a0d212143287088fad51743577dc6dfa8ed328ca90113cbeb9b137926b2168cc037bdc371777e6ee02b9d9c017b6054fd83d43b4885fbe9c044a8574f1491a893
expectedOutput: `1287f188ad4d7ab0d9ff69b3c29cb11f861389532d8cb9337181da2e8cfc74a84927e8c0dd7a28a32fd485afe694259a63c199b199b95edd87c7aa95329feac340f2b78b72956a85f367044d821766b1b7135815571df44900695f1518cf3ae38ecb650f
Tag: 23ddbd3ee4de33f98a9ea9a170bdf268`,
Tag: 821b1e5f32dad052e502775a523d957a`,
recipeConfig: [
{
"op": "AES Encrypt",

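These binary-mode vectors changed when the operation started handing node-forge byte strings instead of byte arrays, so the same displayed key and IV now produce different ciphertext and tags. A minimal sketch of the underlying forge AES-GCM calls (key/IV parsing and output formatting in the real operation differ):

// Illustrative node-forge AES-GCM usage; not the operation's actual argument handling.
const forge = require("node-forge");

const key = forge.random.getBytesSync(16);  // 128-bit key as a byte string
const iv = forge.random.getBytesSync(12);   // 96-bit IV

const cipher = forge.cipher.createCipher("AES-GCM", key);
cipher.start({ iv: iv });
cipher.update(forge.util.createBuffer("some plaintext", "utf8"));
cipher.finish();

console.log(cipher.output.toHex());    // ciphertext
console.log(cipher.mode.tag.toHex());  // GCM tag, reported separately as in the vectors above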
View File

@@ -0,0 +1,22 @@
/**
* Index of Coincidence tests.
*
* @author George O [georgeomnet+cyberchef@gmail.com]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import TestRegister from "../TestRegister";
TestRegister.addTests([
{
name: "Index of Coincidence",
input: "Hello world, this is a test to determine the correct IC value.",
expectedMatch: /^Index of Coincidence: 0\.07142857142857142\nNormalized: 1\.857142857142857/,
recipeConfig: [
{
"op": "Index of Coincidence",
"args": []
},
],
},
]);
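The expected figures follow from the usual definition IC = Σ fᵢ(fᵢ − 1) / (N(N − 1)) over the 26 letter counts, with the normalised value being IC × 26. A quick sketch of that calculation, counting A–Z only:

// Index of coincidence over A-Z, ignoring everything else.
function indexOfCoincidence(text) {
    const counts = new Array(26).fill(0);
    let total = 0;
    for (const ch of text.toUpperCase()) {
        const idx = ch.charCodeAt(0) - 65;
        if (idx >= 0 && idx < 26) {
            counts[idx]++;
            total++;
        }
    }
    const sum = counts.reduce((acc, f) => acc + f * (f - 1), 0);
    return sum / (total * (total - 1));
}

const ic = indexOfCoincidence("Hello world, this is a test to determine the correct IC value.");
// 168 / (49 * 48) = 0.07142857142857142; normalised: ic * 26 = 1.857142857142857
console.log(ic, ic * 26);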

View File

@@ -0,0 +1,93 @@
/**
* JSON to CSV tests.
*
* @author mshwed [m@ttshwed.com]
*
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import TestRegister from "../TestRegister";
const EXPECTED_CSV_SINGLE = "a,b,c\r\n1,2,3\r\n";
const EXPECTED_CSV_MULTIPLE = "a,b,c\r\n1,2,3\r\n1,2,3\r\n";
const EXPECTED_CSV_EMPTY = "\r\n\r\n";
TestRegister.addTests([
{
name: "JSON to CSV: strings as values",
input: JSON.stringify({a: "1", b: "2", c: "3"}),
expectedOutput: EXPECTED_CSV_SINGLE,
recipeConfig: [
{
op: "JSON to CSV",
args: [",", "\\r\\n"]
},
],
},
{
name: "JSON to CSV: numbers as values",
input: JSON.stringify({a: 1, b: 2, c: 3}),
expectedOutput: EXPECTED_CSV_SINGLE,
recipeConfig: [
{
op: "JSON to CSV",
args: [",", "\\r\\n"]
},
],
},
{
name: "JSON to CSV: numbers and strings as values",
input: JSON.stringify({a: 1, b: "2", c: 3}),
expectedOutput: EXPECTED_CSV_SINGLE,
recipeConfig: [
{
op: "JSON to CSV",
args: [",", "\\r\\n"]
},
],
},
{
name: "JSON to CSV: JSON as an array",
input: JSON.stringify([{a: 1, b: "2", c: 3}]),
expectedOutput: EXPECTED_CSV_SINGLE,
recipeConfig: [
{
op: "JSON to CSV",
args: [",", "\\r\\n"]
},
],
},
{
name: "JSON to CSV: multiple JSON values in an array",
input: JSON.stringify([{a: 1, b: "2", c: 3}, {a: 1, b: "2", c: 3}]),
expectedOutput: EXPECTED_CSV_MULTIPLE,
recipeConfig: [
{
op: "JSON to CSV",
args: [",", "\\r\\n"]
},
],
},
{
name: "JSON to CSV: empty JSON",
input: JSON.stringify({}),
expectedOutput: EXPECTED_CSV_EMPTY,
recipeConfig: [
{
op: "JSON to CSV",
args: [",", "\\r\\n"]
},
],
},
{
name: "JSON to CSV: empty JSON in array",
input: JSON.stringify([{}]),
expectedOutput: EXPECTED_CSV_EMPTY,
recipeConfig: [
{
op: "JSON to CSV",
args: [",", "\\r\\n"]
},
],
}
]);
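The fixture strings above capture the basic behaviour under test: keys become the header row, each object becomes a row, and rows are terminated with CRLF. A much-simplified flattening that reproduces those three constants (the real operation also takes delimiter arguments and handles nesting and escaping):

// Illustrative only: flatten a JSON object or array of objects into CSV.
function jsonToCsv(json, cellDelim = ",", rowDelim = "\r\n") {
    const rows = Array.isArray(json) ? json : [json];
    const headers = Object.keys(rows[0]);
    const lines = [headers.join(cellDelim)];
    for (const row of rows) {
        lines.push(headers.map(h => row[h]).join(cellDelim));
    }
    return lines.join(rowDelim) + rowDelim;
}

// jsonToCsv({a: 1, b: "2", c: 3}) === "a,b,c\r\n1,2,3\r\n"
// jsonToCsv({})                   === "\r\n\r\n"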

View File

@@ -0,0 +1,36 @@
/**
* Protobuf tests.
*
* @author n1474335 [n1474335@gmail.com]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import TestRegister from "../TestRegister";
TestRegister.addTests([
{
name: "Protobuf Decode",
input: "0d1c0000001203596f751a024d65202b2a0a0a066162633132331200",
expectedOutput: JSON.stringify({
"1": 469762048,
"2": "You",
"3": "Me",
"4": 43,
"5": {
"1": "abc123",
"2": {}
}
}, null, 4),
recipeConfig: [
{
"op": "From Hex",
"args": ["Auto"]
},
{
"op": "Protobuf Decode",
"args": []
}
]
},
]);
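The hex input is a protobuf wire-format message: each field starts with a varint key of (fieldNumber << 3) | wireType, followed by a payload whose shape depends on the wire type. A minimal sketch of pulling a key apart (CyberChef's decoder additionally recurses into nested messages and handles the fixed-width types):

// Read a varint from a byte array; adequate for the small values in this test.
function readVarint(bytes, offset) {
    let result = 0, shift = 0, pos = offset;
    for (;;) {
        const b = bytes[pos++];
        result |= (b & 0x7f) << shift;
        if ((b & 0x80) === 0) break;
        shift += 7;
    }
    return { value: result, next: pos };
}

// A field key packs (fieldNumber << 3) | wireType. The 0x12 byte in the input
// introduces field 2, wire type 2 (length-delimited), i.e. the "You" string above.
const key = readVarint([0x12], 0).value;
console.log(key >> 3, key & 0x07); // 2 2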

View File

@@ -48,7 +48,7 @@ module.exports = {
"process.browser": "true"
}),
new MiniCssExtractPlugin({
filename: "[name].css"
filename: "assets/[name].css"
}),
],
resolve: {
@@ -80,7 +80,12 @@ module.exports = {
{
test: /\.css$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: MiniCssExtractPlugin.loader,
options: {
publicPath: "../"
}
},
"css-loader",
"postcss-loader",
]
@@ -88,7 +93,12 @@ module.exports = {
{
test: /\.scss$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: MiniCssExtractPlugin.loader,
options: {
publicPath: "../"
}
},
"css-loader",
"sass-loader",
]
@@ -97,7 +107,9 @@ module.exports = {
test: /\.(ico|eot|ttf|woff|woff2)$/,
loader: "url-loader",
options: {
limit: 10000
limit: 10000,
name: "[hash].[ext]",
outputPath: "assets"
}
},
{
@@ -120,7 +132,9 @@ module.exports = {
exclude: /web\/static/,
loader: "url-loader",
options: {
limit: 10000
limit: 10000,
name: "[hash].[ext]",
outputPath: "assets"
}
},
]
@@ -133,11 +147,15 @@ module.exports = {
warningsFilter: [
/source-map/,
/dependency is an expression/,
/export 'default'/
/export 'default'/,
/Can't resolve 'sodium'/
],
},
node: {
fs: "empty"
fs: "empty",
"child_process": "empty",
net: "empty",
tls: "empty"
},
performance: {
hints: false