Mirror of https://github.com/gchq/CyberChef

Compare commits


30 Commits

Author SHA1 Message Date
d98762625
007224c92e 9.20.6 2020-05-27 15:59:42 +01:00
Matthieu
738ee33959 Fix bug in Normalise Unicode operation: replace nfc by nfkc 2020-05-27 15:47:40 +01:00
d98762625
d720a6b250 9.20.5 2020-05-27 15:34:45 +01:00
Matt
5ce3cc17bb Fixed tesseract issue 2020-05-27 15:00:34 +01:00
d98762625
5c35205315 9.20.4 2020-05-27 13:35:20 +01:00
d98762625
10751934e4 add uname dependency and bump chromedriver 2020-05-27 12:28:59 +01:00
d98762625
d658f91106 Merge branch 'master' of github.com:gchq/CyberChef into javascript-minify 2020-05-27 11:54:07 +01:00
n1474335
4c3324aea1 9.20.3 2020-03-30 11:17:46 +01:00
n1474335
ac2fcee90f Merge branch 'Danh4-issue-998' 2020-03-30 11:17:38 +01:00
71819
94e00115fe Issue 998: DishJSON should only replace undefined with new ArrayBuffer not null or false 2020-03-28 20:27:59 +00:00
d98762625
29255d2338 Merge pull request #983 from n1073645/gruntSpelling
Grunt npm tests comment changed to npm test
2020-03-27 14:23:54 +00:00
n1474335
39278cfce7 9.20.2 2020-03-27 12:10:01 +00:00
n1474335
46cc48cfb9 Renamed Parse ObjectID Timestamp operation files 2020-03-27 12:09:57 +00:00
n1474335
eb4009949d 9.20.1 2020-03-27 12:05:41 +00:00
n1474335
57c48a4bd2 Merge branch 'n1073645-TargaExtractor' 2020-03-27 12:05:34 +00:00
n1474335
45011de494 Tidied up TARGA extractor 2020-03-27 12:05:23 +00:00
n1474335
5e51ed0a5f Merge branch 'TargaExtractor' of https://github.com/n1073645/CyberChef into n1073645-TargaExtractor 2020-03-27 12:01:46 +00:00
n1474335
875802ef2a 9.20.0 2020-03-27 12:00:39 +00:00
n1474335
bbc255ef83 Updated CHANGELOG 2020-03-27 12:00:33 +00:00
n1474335
fc155ec3fc Merge branch 'dmfj-parse-objectid-timestamp' 2020-03-27 11:56:58 +00:00
n1474335
3a0c8a199a Tidied up 'Parse ObjectID Timestamp' operation 2020-03-27 11:56:42 +00:00
n1474335
9c729c4490 Merge branch 'parse-objectid-timestamp' of https://github.com/dmfj/CyberChef into dmfj-parse-objectid-timestamp 2020-03-27 11:48:55 +00:00
n1474335
19bdbd66e5 Merge branch 'cbeuw-stacking-fix' 2020-03-27 10:19:14 +00:00
n1474335
ea090f79ee Merge branch 'stacking-fix' of https://github.com/cbeuw/CyberChef into cbeuw-stacking-fix 2020-03-27 10:17:54 +00:00
Andy Wang
1be6c54be2 Fix dropup menu being covered 2020-03-26 22:45:03 +00:00
Dominic Fitch-Jones
9f4ef9cdad Add ObjectId timestamp parser operation 2020-03-21 17:42:17 -04:00
n1073645
8a029e5147 Grunt npm tests changed to npm test 2020-03-17 08:40:15 +00:00
n1073645
4251089687 Targa Image Extractor 2020-03-17 08:24:35 +00:00
n1073645
dbcd670ca8 Targa file extractor 2020-03-16 16:56:01 +00:00
Jarrod Connolly
462f619f43 Update JavaScript Minify operation to support ES6. 2019-10-31 23:18:54 -07:00
19 changed files with 2255 additions and 2419 deletions

View File

@@ -2,6 +2,9 @@
All major and minor version changes will be documented in this file. Details of patch-level version changes can be found in [commit messages](https://github.com/gchq/CyberChef/commits/master).
+### [9.20.0] - 2020-03-27
+- 'Parse ObjectID Timestamp' operation added [@dmfj] | [#987]
### [9.19.0] - 2020-03-24
- Improvements to the 'Magic' operation, allowing it to recognise more data formats and provide more accurate results [@n1073645] [@n1474335] | [#966] [b765534b](https://github.com/gchq/CyberChef/commit/b765534b8b2a0454a5132a0a52d1d8844bcbdaaa)
@@ -221,6 +224,7 @@ All major and minor version changes will be documented in this file. Details of
+[9.20.0]: https://github.com/gchq/CyberChef/releases/tag/v9.20.0
[9.19.0]: https://github.com/gchq/CyberChef/releases/tag/v9.19.0
[9.18.0]: https://github.com/gchq/CyberChef/releases/tag/v9.18.0
[9.17.0]: https://github.com/gchq/CyberChef/releases/tag/v9.17.0
@@ -318,6 +322,7 @@ All major and minor version changes will be documented in this file. Details of
[@Flavsditz]: https://github.com/Flavsditz
[@pointhi]: https://github.com/pointhi
[@MarvinJWendt]: https://github.com/MarvinJWendt
+[@dmfj]: https://github.com/dmfj
[#95]: https://github.com/gchq/CyberChef/pull/299
[#173]: https://github.com/gchq/CyberChef/pull/173
@@ -389,3 +394,4 @@ All major and minor version changes will be documented in this file. Details of
[#952]: https://github.com/gchq/CyberChef/pull/952
[#965]: https://github.com/gchq/CyberChef/pull/965
[#966]: https://github.com/gchq/CyberChef/pull/966
+[#987]: https://github.com/gchq/CyberChef/pull/987

View File

@@ -37,7 +37,7 @@ module.exports = function (grunt) {
]);
grunt.registerTask("configTests",
-"A task which configures config files in preparation for tests to be run. Use `npm tests` to run tests.",
+"A task which configures config files in preparation for tests to be run. Use `npm test` to run tests.",
[
"clean:config", "clean:nodeConfig", "exec:generateConfig", "exec:generateNodeIndex"
]);

package-lock.json (generated, 4338 changes)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "cyberchef",
-"version": "9.19.1",
+"version": "9.20.6",
"description": "The Cyber Swiss Army Knife for encryption, encoding, compression and data analysis.",
"author": "n1474335 <n1474335@gmail.com>",
"homepage": "https://gchq.github.io/CyberChef",
@@ -43,7 +43,7 @@
"babel-eslint": "^10.1.0", "babel-eslint": "^10.1.0",
"babel-loader": "^8.0.6", "babel-loader": "^8.0.6",
"babel-plugin-dynamic-import-node": "^2.3.0", "babel-plugin-dynamic-import-node": "^2.3.0",
"chromedriver": "^80.0.1", "chromedriver": "^83.0.0",
"cli-progress": "^3.6.0", "cli-progress": "^3.6.0",
"colors": "^1.4.0", "colors": "^1.4.0",
"copy-webpack-plugin": "^5.1.1", "copy-webpack-plugin": "^5.1.1",
@@ -108,7 +108,6 @@
"es6-promisify": "^6.1.0", "es6-promisify": "^6.1.0",
"escodegen": "^1.14.1", "escodegen": "^1.14.1",
"esm": "^3.2.25", "esm": "^3.2.25",
"esmangle": "^1.0.1",
"esprima": "^4.0.1", "esprima": "^4.0.1",
"exif-parser": "^0.1.12", "exif-parser": "^0.1.12",
"file-saver": "^2.0.2", "file-saver": "^2.0.2",
@@ -146,6 +145,7 @@
"sortablejs": "^1.10.2", "sortablejs": "^1.10.2",
"split.js": "^1.5.11", "split.js": "^1.5.11",
"ssdeep.js": "0.0.2", "ssdeep.js": "0.0.2",
"terser": "^4.3.9",
"tesseract.js": "^2.0.2", "tesseract.js": "^2.0.2",
"ua-parser-js": "^0.7.21", "ua-parser-js": "^0.7.21",
"unorm": "^1.6.0", "unorm": "^1.6.0",

View File

@@ -242,6 +242,7 @@
"Convert co-ordinate format", "Convert co-ordinate format",
"Show on map", "Show on map",
"Parse UNIX file permissions", "Parse UNIX file permissions",
"Parse ObjectID timestamp",
"Swap endianness", "Swap endianness",
"Parse colour code", "Parse colour code",
"Escape string", "Escape string",

View File

@@ -17,7 +17,7 @@ class DishJSON extends DishType {
*/
static toArrayBuffer() {
DishJSON.checkForValue(this.value);
-this.value = this.value ? Utils.strToArrayBuffer(JSON.stringify(this.value, null, 4)) : new ArrayBuffer;
+this.value = this.value !== undefined ? Utils.strToArrayBuffer(JSON.stringify(this.value, null, 4)) : new ArrayBuffer;
}
/**
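The single-token change above matters because a JSON dish can legitimately hold falsy values such as false, 0 or null; only undefined should fall back to an empty buffer (issue 998). A minimal sketch of the difference in plain JavaScript, with illustrative names only:

// Before: any falsy value (null, false, 0, "") collapsed to an empty result.
// After: only undefined does; genuine falsy JSON values are serialised as expected.
const before = value => value ? JSON.stringify(value, null, 4) : "";
const after = value => value !== undefined ? JSON.stringify(value, null, 4) : "";
console.log(before(false)); // ""
console.log(after(false));  // "false"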

View File

@@ -468,6 +468,34 @@ export const FILE_SIGNATURES = {
],
extractor: null
},
{
name: "Targa Image",
extension: "tga",
mime: "image/x-targa",
description: "",
signature: [
{ // This signature is not at the beginning of the file. The extractor works backwards.
0: 0x54,
1: 0x52,
2: 0x55,
3: 0x45,
4: 0x56,
5: 0x49,
6: 0x53,
7: 0x49,
8: 0x4f,
9: 0x4e,
10: 0x2d,
11: 0x58,
12: 0x46,
13: 0x49,
14: 0x4c,
15: 0x45,
16: 0x2e
}
],
extractor: extractTARGA
}
],
"Video": [
{ // Place before webm
@@ -3047,6 +3075,90 @@ export function extractICO(bytes, offset) {
}
/**
* TARGA extractor.
*
* @param {Uint8Array} bytes
* @param {number} offset
*/
export function extractTARGA(bytes, offset) {
// Need all the bytes since we do not know how far up the image goes.
const stream = new Stream(bytes);
stream.moveTo(offset - 8);
// Read in the offsets of the possible areas.
const extensionOffset = stream.readInt(4, "le");
const developerOffset = stream.readInt(4, "le");
stream.moveBackwardsBy(8);
/**
* Moves backwards in the stream until it meet bytes that are the same as the amount of bytes moved.
*
* @param {number} sizeOfSize
* @param {number} maxSize
*/
function moveBackwardsUntilSize(maxSize, sizeOfSize) {
for (let i = 0; i < maxSize; i++) {
stream.moveBackwardsBy(1);
// Read in sizeOfSize amount of bytes in.
const size = stream.readInt(sizeOfSize, "le") - 1;
stream.moveBackwardsBy(sizeOfSize);
// If the size matches.
if (size === i)
break;
}
}
/**
* Moves backwards in the stream until we meet bytes(when calculated) that are the same as the amount of bytes moved.
*/
function moveBackwardsUntilImageSize() {
stream.moveBackwardsBy(5);
// The documentation said that 0x100000 was the largest the file could be.
for (let i = 0; i < 0x100000; i++) {
// (Height * Width * pixel depth in bits)/8
const total = (stream.readInt(2, "le") * stream.readInt(2, "le") * stream.readInt(1))/8;
if (total === i-1)
break;
stream.moveBackwardsBy(6);
}
}
if (extensionOffset || developerOffset) {
if (extensionOffset) {
// Size is stored in two bytes hence the maximum is 0xffff.
moveBackwardsUntilSize(0xffff, 2);
// Move to where we think the start of the file is.
stream.moveBackwardsBy(extensionOffset);
} else if (developerOffset) {
// Size is stored in 4 bytes hence the maxiumum is 0xffffffff.
moveBackwardsUntilSize(0xffffffff, 4);
// Size is stored in byte position 6 so have to move back.
stream.moveBackwardsBy(6);
// Move to where we think the start of the file is.
stream.moveBackwardsBy(developerOffset);
}
} else {
// Move backwards until size === number of bytes passed.
moveBackwardsUntilImageSize();
// Move backwards over the reaminder of the header + the 5 we borrowed in moveBackwardsUntilImageSize().
stream.moveBackwardsBy(0xc+5);
}
return stream.carve(stream.position, offset+0x12);
}
/**
* WAV extractor.
*
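For reference, the 17 signature bytes registered above are the TGA v2 footer magic, which decodes to the ASCII string "TRUEVISION-XFILE."; because that footer sits at the end of the file, the extractor has to walk backwards from the match to find the start of the image. A quick check of the decoding in plain JavaScript:

// Decode the registered signature bytes to confirm they spell the TGA v2 footer magic.
const sig = [0x54, 0x52, 0x55, 0x45, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e,
             0x2d, 0x58, 0x46, 0x49, 0x4c, 0x45, 0x2e];
console.log(String.fromCharCode(...sig)); // "TRUEVISION-XFILE."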

View File

@@ -303,11 +303,13 @@ export default class Stream {
/**
* Returns a slice of the stream up to the current position.
*
+* @param {number} [start=0]
+* @param {number} [finish=this.position]
* @returns {Uint8Array}
*/
-carve() {
-if (this.bitPos > 0) this.position++;
-return this.bytes.slice(0, this.position);
+carve(start=0, finish=this.position) {
+if (this.bitPos > 0) finish++;
+return this.bytes.slice(start, finish);
}
}
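The extended signature stays backwards compatible: carve() with no arguments still returns bytes 0..position, while an extractor can pass an explicit start and finish to slice out a region it located by moving backwards, as the TARGA extractor above does. A rough illustration, assuming a Stream wrapping a Uint8Array as elsewhere in this file (the offsets are made up for the example):

// Hypothetical usage; `bytes` stands in for whatever buffer the extractor was given.
const stream = new Stream(bytes);
stream.moveTo(100);
const upToHere = stream.carve();      // unchanged behaviour: bytes.slice(0, 100)
const window = stream.carve(40, 120); // new: explicit start/finish, e.g. a carved TGA file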

View File

@@ -4,10 +4,9 @@
* @license Apache-2.0
*/
+import OperationError from "../errors/OperationError.mjs";
import Operation from "../Operation.mjs";
-import * as esprima from "esprima";
-import escodegen from "escodegen";
-import esmangle from "esmangle";
+import Terser from "terser";
/**
* JavaScript Minify operation * JavaScript Minify operation
@@ -34,22 +33,11 @@ class JavaScriptMinify extends Operation {
* @returns {string}
*/
run(input, args) {
-let result = "";
-const AST = esprima.parseScript(input),
-optimisedAST = esmangle.optimize(AST, null),
-mangledAST = esmangle.mangle(optimisedAST);
-result = escodegen.generate(mangledAST, {
-format: {
-renumber: true,
-hexadecimal: true,
-escapeless: true,
-compact: true,
-semicolons: false,
-parentheses: false
-}
-});
-return result;
+const result = Terser.minify(input);
+if (result.error) {
+throw new OperationError(`Error minifying JavaScript. (${result.error})`);
+}
+return result.code;
}
}
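Terser 4.x exposes a synchronous minify() that returns an object carrying either a code or an error property, which is exactly what the rewritten run() checks. A hedged usage sketch mirroring the import style used above; the output shown is indicative rather than exact:

import Terser from "terser";

// ES6 input that the old esprima/esmangle/escodegen pipeline could not handle.
const result = Terser.minify("const add = (a, b) => { return a + b; };");
if (result.error) throw result.error;
console.log(result.code); // something along the lines of: const add=(a,b)=>a+b;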

View File

@@ -51,7 +51,7 @@ class NormaliseUnicode extends Operation {
case "NFKD": case "NFKD":
return unorm.nfkd(input); return unorm.nfkd(input);
case "NFKC": case "NFKC":
return unorm.nfc(input); return unorm.nfkc(input);
default: default:
throw new OperationError("Unknown Normalisation Form"); throw new OperationError("Unknown Normalisation Form");
} }
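The practical difference is that NFKC applies compatibility decompositions where NFC does not, so a character such as U+2160 (Roman numeral one) folds to a plain "I" only under NFKC; the test update further down reflects exactly this. A quick sketch using the built-in String.prototype.normalize, which mirrors unorm's behaviour here:

const input = "\u00C7\u0043\u0327\u2160"; // C-cedilla, C + combining cedilla, Roman numeral one
console.log(input.normalize("NFC"));  // "\u00C7\u00C7\u2160" (Roman numeral preserved)
console.log(input.normalize("NFKC")); // "\u00C7\u00C7I" (compatibility mapping folds it to I)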

View File

@@ -1,6 +1,7 @@
/**
* @author n1474335 [n1474335@gmail.com]
* @author mshwed [m@ttshwed.com]
+* @author Matt C [me@mitt.dev]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
@@ -12,7 +13,7 @@ import { toBase64 } from "../lib/Base64.mjs";
import { isWorkerEnvironment } from "../Utils.mjs";
import Tesseract from "tesseract.js";
-const { TesseractWorker } = Tesseract;
+const { createWorker } = Tesseract;
import process from "process";
@@ -60,23 +61,30 @@ class OpticalCharacterRecognition extends Operation {
const assetDir = isWorkerEnvironment() ? `${self.docURL}/assets/` : `${process.cwd()}/src/core/vendor/`;
try {
+self.sendStatusMessage("Spinning up Tesseract worker...");
const image = `data:${type};base64,${toBase64(input)}`;
-const worker = new TesseractWorker({
+const worker = createWorker({
workerPath: `${assetDir}tesseract/worker.min.js`,
langPath: `${assetDir}tesseract/lang-data`,
corePath: `${assetDir}tesseract/tesseract-core.wasm.js`,
-});
-const result = await worker.recognize(image)
-.progress(progress => {
+logger: progress => {
if (isWorkerEnvironment()) {
-self.sendStatusMessage(`Status: ${progress.status} - ${(parseFloat(progress.progress)*100).toFixed(2)}%`);
+self.sendStatusMessage(`Status: ${progress.status}${progress.status === "recognizing text" ? ` - ${(parseFloat(progress.progress)*100).toFixed(2)}%`: "" }`);
}
+}
});
+await worker.load();
+self.sendStatusMessage("Loading English language...");
+await worker.loadLanguage("eng");
+self.sendStatusMessage("Intialising Tesseract API...");
+await worker.initialize("eng");
+self.sendStatusMessage("Finding text...");
+const result = await worker.recognize(image);
if (showConfidence) {
-return `Confidence: ${result.confidence}%\n\n${result.text}`;
+return `Confidence: ${result.data.confidence}%\n\n${result.data.text}`;
} else {
-return result.text;
+return result.data.text;
}
} catch (err) {
throw new OperationError(`Error performing OCR on image. (${err})`);
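This follows the tesseract.js v2 API, where createWorker replaces the old TesseractWorker class: the worker is explicitly loaded and initialised for a language before recognize() is called, progress arrives through the logger option, and results sit under result.data. A condensed sketch of that flow, with asset paths omitted and the v2 defaults assumed:

import Tesseract from "tesseract.js";
const { createWorker } = Tesseract;

// `image` is assumed to be a data URL or file path; this is a sketch, not the operation's code.
async function ocr(image) {
    const worker = createWorker({ logger: m => console.log(m.status, m.progress) });
    await worker.load();
    await worker.loadLanguage("eng");
    await worker.initialize("eng");
    const { data } = await worker.recognize(image);
    await worker.terminate();
    return `Confidence: ${data.confidence}%\n\n${data.text}`;
}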

View File

@@ -0,0 +1,47 @@
/**
* @author dmfj [dominic@dmfj.io]
* @copyright Crown Copyright 2020
* @license Apache-2.0
*/
import Operation from "../Operation.mjs";
import OperationError from "../errors/OperationError.mjs";
import BSON from "bson";
/**
* Parse ObjectID timestamp operation
*/
class ParseObjectIDTimestamp extends Operation {
/**
* ParseObjectIDTimestamp constructor
*/
constructor() {
super();
this.name = "Parse ObjectID timestamp";
this.module = "Serialise";
this.description = "Parse timestamp from MongoDB/BSON ObjectID hex string.";
this.infoURL = "https://docs.mongodb.com/manual/reference/method/ObjectId.getTimestamp/";
this.inputType = "string";
this.outputType = "string";
this.args = [];
}
/**
* @param {string} input
* @param {Object[]} args
* @returns {string}
*/
run(input, args) {
try {
const objectId = new BSON.ObjectID(input);
return objectId.getTimestamp().toISOString();
} catch (err) {
throw new OperationError(err);
}
}
}
export default ParseObjectIDTimestamp;
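The operation delegates to BSON.ObjectID, but the underlying rule is simple: the first 4 bytes (8 hex characters) of an ObjectId are a big-endian Unix timestamp in seconds. A dependency-free sketch of the same computation, for illustration only:

// First 8 hex chars = seconds since the Unix epoch, big-endian.
const objectIdTimestamp = id => new Date(parseInt(id.slice(0, 8), 16) * 1000).toISOString();
console.log(objectIdTimestamp("000000000000000000000000")); // "1970-01-01T00:00:00.000Z"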

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -51,7 +51,6 @@ class RecipeWaiter {
}
}.bind(this),
onSort: function(evt) {
-this.updateZIndices();
if (evt.from.id === "rec-list") {
document.dispatchEvent(this.manager.statechange);
}
@@ -150,19 +149,6 @@ class RecipeWaiter {
}
-/**
-* Sets the z-index property on each operation to make sure that operations higher in the list
-* have a higher index, meaning dropdowns are not hidden underneath subsequent operations.
-*/
-updateZIndices() {
-const operations = document.getElementById("rec-list").children;
-for (let i = 0; i < operations.length; i++) {
-const operation = operations[i];
-operation.style.zIndex = 100 + operations.length - i;
-}
-}
/**
* Handler for favourite dragover events.
* If the element being dragged is an operation, displays a visual cue so that the user knows it can
@@ -480,7 +466,6 @@ class RecipeWaiter {
log.debug(`'${e.target.querySelector(".op-title").textContent}' added to recipe`);
this.triggerArgEvents(e.target);
-this.updateZIndices();
window.dispatchEvent(this.manager.statechange);
}

View File

@@ -100,6 +100,7 @@ import "./tests/Lorenz.mjs";
import "./tests/LuhnChecksum.mjs"; import "./tests/LuhnChecksum.mjs";
import "./tests/CipherSaber2.mjs"; import "./tests/CipherSaber2.mjs";
import "./tests/Colossus.mjs"; import "./tests/Colossus.mjs";
import "./tests/ParseObjectIDTimestamp.mjs";
// Cannot test operations that use the File type yet // Cannot test operations that use the File type yet
@@ -120,4 +121,3 @@ const logOpsTestReport = logTestReport.bind(null, testStatus);
const results = await TestRegister.runTests();
logOpsTestReport(results);
})();

View File

@@ -42,7 +42,7 @@ TestRegister.addTests([
}, {
name: "Normalise Unicode - NFKC",
input: "\u00c7\u0043\u0327\u2160",
-expectedMatch: /\u00C7\u00C7\u2160/,
+expectedMatch: /\u00C7\u00C7I/,
recipeConfig: [
{
op: "Normalise Unicode",

View File

@@ -0,0 +1,24 @@
/**
* Parse ObjectID timestamp tests
*
* @author dmfj [dominic@dmfj.io]
*
* @copyright Crown Copyright 2018
* @license Apache-2.0
*/
import TestRegister from "../../lib/TestRegister.mjs";
TestRegister.addTests([
{
name: "Parse ISO timestamp from ObjectId",
input: "000000000000000000000000",
expectedOutput: "1970-01-01T00:00:00.000Z",
recipeConfig: [
{
op: "Parse ObjectID timestamp",
args: [],
}
],
}
]);

View File

@@ -56,6 +56,14 @@ module.exports = {
context: "src/core/vendor/", context: "src/core/vendor/",
from: "tesseract/**/*", from: "tesseract/**/*",
to: "assets/" to: "assets/"
}, {
context: "node_modules/tesseract.js/",
from: "dist/worker.min.js",
to: "assets/tesseract"
}, {
context: "node_modules/tesseract.js-core/",
from: "tesseract-core.wasm.js",
to: "assets/tesseract"
} }
]) ])
], ],