mirror of https://github.com/gchq/CyberChef synced 2026-01-06 18:43:23 +00:00

Compare commits


50 Commits

Author SHA1 Message Date
d98762625
5c35205315 9.20.4 2020-05-27 13:35:20 +01:00
d98762625
10751934e4 add uname dependency and bump chromedriver 2020-05-27 12:28:59 +01:00
d98762625
d658f91106 Merge branch 'master' of github.com:gchq/CyberChef into javascript-minify 2020-05-27 11:54:07 +01:00
n1474335
4c3324aea1 9.20.3 2020-03-30 11:17:46 +01:00
n1474335
ac2fcee90f Merge branch 'Danh4-issue-998' 2020-03-30 11:17:38 +01:00
71819
94e00115fe Issue 998: DishJSON should only replace undefined with new ArrayBuffer not null or false 2020-03-28 20:27:59 +00:00
d98762625
29255d2338 Merge pull request #983 from n1073645/gruntSpelling
Grunt npm tests comment changed to npm test
2020-03-27 14:23:54 +00:00
n1474335
39278cfce7 9.20.2 2020-03-27 12:10:01 +00:00
n1474335
46cc48cfb9 Renamed Parse ObjectID Timestamp operation files 2020-03-27 12:09:57 +00:00
n1474335
eb4009949d 9.20.1 2020-03-27 12:05:41 +00:00
n1474335
57c48a4bd2 Merge branch 'n1073645-TargaExtractor' 2020-03-27 12:05:34 +00:00
n1474335
45011de494 Tidied up TARGA extractor 2020-03-27 12:05:23 +00:00
n1474335
5e51ed0a5f Merge branch 'TargaExtractor' of https://github.com/n1073645/CyberChef into n1073645-TargaExtractor 2020-03-27 12:01:46 +00:00
n1474335
875802ef2a 9.20.0 2020-03-27 12:00:39 +00:00
n1474335
bbc255ef83 Updated CHANGELOG 2020-03-27 12:00:33 +00:00
n1474335
fc155ec3fc Merge branch 'dmfj-parse-objectid-timestamp' 2020-03-27 11:56:58 +00:00
n1474335
3a0c8a199a Tidied up 'Parse ObjectID Timestamp' operation 2020-03-27 11:56:42 +00:00
n1474335
9c729c4490 Merge branch 'parse-objectid-timestamp' of https://github.com/dmfj/CyberChef into dmfj-parse-objectid-timestamp 2020-03-27 11:48:55 +00:00
n1474335
19bdbd66e5 Merge branch 'cbeuw-stacking-fix' 2020-03-27 10:19:14 +00:00
n1474335
ea090f79ee Merge branch 'stacking-fix' of https://github.com/cbeuw/CyberChef into cbeuw-stacking-fix 2020-03-27 10:17:54 +00:00
Andy Wang
1be6c54be2 Fix dropup menu being covered 2020-03-26 22:45:03 +00:00
n1474335
a4eeb226b1 9.19.1 2020-03-26 12:02:09 +00:00
n1474335
d136717636 Merge branch 'n1073645-MP3Extractor' 2020-03-26 12:02:02 +00:00
n1474335
ad0a2e6f58 Merge branch 'MP3Extractor' of https://github.com/n1073645/CyberChef into n1073645-MP3Extractor 2020-03-26 12:01:30 +00:00
n1474335
0212bfb46e 9.19.0 2020-03-24 11:37:30 +00:00
n1474335
5e0d661542 Updated CHANGELOG 2020-03-24 11:37:26 +00:00
n1474335
4c5f529ef4 Merge branch 'n1073645-newMagic' 2020-03-24 11:07:20 +00:00
n1474335
b765534b8b Tidied up the Magic operation 2020-03-24 11:06:37 +00:00
n1073645
7c672c5ee9 MP3 Extractor added 2020-03-23 10:11:24 +00:00
n1073645
090bf3f8ec MP3 Extractor added 2020-03-23 10:10:47 +00:00
Dominic Fitch-Jones
9f4ef9cdad Add ObjectId timestamp parser operation 2020-03-21 17:42:17 -04:00
n1474335
26fa66ef64 Merge branch 'newMagic' of https://github.com/n1073645/CyberChef into n1073645-newMagic 2020-03-20 14:51:40 +00:00
n1073645
b69e4567c0 MP3extractor 2020-03-19 16:10:22 +00:00
n1474335
26b19350f2 9.18.2 2020-03-18 16:47:53 +00:00
n1474335
2018b7e247 Fixed sitemap 2020-03-18 16:47:48 +00:00
n1073645
ff585584f6 MP3 Extractor 2020-03-18 12:51:47 +00:00
n1073645
8a029e5147 Grunt npm tests changed to npm test 2020-03-17 08:40:15 +00:00
n1073645
4251089687 Targa Image Extractor 2020-03-17 08:24:35 +00:00
n1073645
dbcd670ca8 Targa file extractor 2020-03-16 16:56:01 +00:00
n1073645
5b6a53be3e Docstrings added for Magic functions 2020-03-12 14:55:19 +00:00
n1073645
5b5105c864 Caching added for Magic regexes 2020-03-12 14:45:40 +00:00
n1073645
570a84b67a More Magic tests 2020-03-11 16:27:37 +00:00
n1073645
fd7176a445 Extra Magic Tests 2020-03-11 12:51:46 +00:00
n1073645
0a06472639 Test added for From Hex 2020-03-10 11:23:14 +00:00
n1073645
3f3a7cd4f6 From Hex Regexes 2020-03-10 11:12:43 +00:00
n1073645
99415359d0 Extra Magic Tests 2020-03-10 09:39:13 +00:00
n1073645
20d0ae5304 Linting corrections 2020-02-25 11:35:39 +00:00
n1073645
2ba37af109 extra signatures 2020-02-25 11:33:35 +00:00
n1073645
728f8e65d6 Magic rebuild 2020-02-25 11:27:03 +00:00
Jarrod Connolly
462f619f43 Update JavaScript Minify operation to support ES6. 2019-10-31 23:18:54 -07:00
59 changed files with 2869 additions and 2559 deletions

View File

@@ -2,6 +2,12 @@
All major and minor version changes will be documented in this file. Details of patch-level version changes can be found in [commit messages](https://github.com/gchq/CyberChef/commits/master).
### [9.20.0] - 2020-03-27
- 'Parse ObjectID Timestamp' operation added [@dmfj] | [#987]
### [9.19.0] - 2020-03-24
- Improvements to the 'Magic' operation, allowing it to recognise more data formats and provide more accurate results [@n1073645] [@n1474335] | [#966] [b765534b](https://github.com/gchq/CyberChef/commit/b765534b8b2a0454a5132a0a52d1d8844bcbdaaa)
### [9.18.0] - 2020-03-13
- 'Convert to NATO alphabet' operation added [@MarvinJWendt] | [#674]
@@ -218,6 +224,8 @@ All major and minor version changes will be documented in this file. Details of
[9.20.0]: https://github.com/gchq/CyberChef/releases/tag/v9.20.0
[9.19.0]: https://github.com/gchq/CyberChef/releases/tag/v9.19.0
[9.18.0]: https://github.com/gchq/CyberChef/releases/tag/v9.18.0
[9.17.0]: https://github.com/gchq/CyberChef/releases/tag/v9.17.0
[9.16.0]: https://github.com/gchq/CyberChef/releases/tag/v9.16.0
@@ -314,6 +322,7 @@ All major and minor version changes will be documented in this file. Details of
[@Flavsditz]: https://github.com/Flavsditz
[@pointhi]: https://github.com/pointhi
[@MarvinJWendt]: https://github.com/MarvinJWendt
[@dmfj]: https://github.com/dmfj
[#95]: https://github.com/gchq/CyberChef/pull/299
[#173]: https://github.com/gchq/CyberChef/pull/173
@@ -384,3 +393,5 @@ All major and minor version changes will be documented in this file. Details of
[#948]: https://github.com/gchq/CyberChef/pull/948
[#952]: https://github.com/gchq/CyberChef/pull/952
[#965]: https://github.com/gchq/CyberChef/pull/965
[#966]: https://github.com/gchq/CyberChef/pull/966
[#987]: https://github.com/gchq/CyberChef/pull/987

View File

@@ -37,7 +37,7 @@ module.exports = function (grunt) {
]);
grunt.registerTask("configTests",
"A task which configures config files in preparation for tests to be run. Use `npm tests` to run tests.",
"A task which configures config files in preparation for tests to be run. Use `npm test` to run tests.",
[
"clean:config", "clean:nodeConfig", "exec:generateConfig", "exec:generateNodeIndex"
]);

package-lock.json (generated, 4344 lines changed)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "cyberchef",
"version": "9.18.1",
"version": "9.20.4",
"description": "The Cyber Swiss Army Knife for encryption, encoding, compression and data analysis.",
"author": "n1474335 <n1474335@gmail.com>",
"homepage": "https://gchq.github.io/CyberChef",
@@ -43,7 +43,7 @@
"babel-eslint": "^10.1.0",
"babel-loader": "^8.0.6",
"babel-plugin-dynamic-import-node": "^2.3.0",
"chromedriver": "^80.0.1",
"chromedriver": "^83.0.0",
"cli-progress": "^3.6.0",
"colors": "^1.4.0",
"copy-webpack-plugin": "^5.1.1",
@@ -108,7 +108,6 @@
"es6-promisify": "^6.1.0",
"escodegen": "^1.14.1",
"esm": "^3.2.25",
"esmangle": "^1.0.1",
"esprima": "^4.0.1",
"exif-parser": "^0.1.12",
"file-saver": "^2.0.2",
@@ -146,6 +145,7 @@
"sortablejs": "^1.10.2",
"split.js": "^1.5.11",
"ssdeep.js": "0.0.2",
"terser": "^4.3.9",
"tesseract.js": "^2.0.2",
"ua-parser-js": "^0.7.21",
"unorm": "^1.6.0",

View File

@@ -1182,6 +1182,7 @@ class Utils {
"CRLF": /\r\n/g,
"Forward slash": /\//g,
"Backslash": /\\/g,
"0x with comma": /,?0x/g,
"0x": /0x/g,
"\\x": /\\x/g,
"None": /\s+/g // Included here to remove whitespace when there shouldn't be any

View File

@@ -242,6 +242,7 @@
"Convert co-ordinate format",
"Show on map",
"Parse UNIX file permissions",
"Parse ObjectID timestamp",
"Swap endianness",
"Parse colour code",
"Escape string",

View File

@@ -42,13 +42,10 @@ for (const opObj in Ops) {
outputType: op.presentType,
flowControl: op.flowControl,
manualBake: op.manualBake,
args: op.args
args: op.args,
checks: op.checks
};
if ("patterns" in op) {
operationConfig[op.name].patterns = op.patterns;
}
if (!(op.module in modules))
modules[op.module] = {};
modules[op.module][op.name] = opObj;

View File

@@ -17,7 +17,7 @@ class DishJSON extends DishType {
*/
static toArrayBuffer() {
DishJSON.checkForValue(this.value);
this.value = this.value ? Utils.strToArrayBuffer(JSON.stringify(this.value, null, 4)) : new ArrayBuffer;
this.value = this.value !== undefined ? Utils.strToArrayBuffer(JSON.stringify(this.value, null, 4)) : new ArrayBuffer;
}
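For context (not from the diff): in JavaScript, false, 0, null and "" are all falsy, so the old truthiness check would have turned valid JSON values into an empty ArrayBuffer; only undefined should fall through to that branch. A minimal illustration with hypothetical values:
// Falsy but valid JSON values should still be serialised.
[false, 0, null, ""].forEach(v => {
    console.log(Boolean(v), v !== undefined); // false true for every entry
});
console.log(undefined !== undefined); // false, so only undefined yields the empty ArrayBuffer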
/**

View File

@@ -468,6 +468,34 @@ export const FILE_SIGNATURES = {
],
extractor: null
},
{
name: "Targa Image",
extension: "tga",
mime: "image/x-targa",
description: "",
signature: [
{ // This signature is not at the beginning of the file. The extractor works backwards.
0: 0x54,
1: 0x52,
2: 0x55,
3: 0x45,
4: 0x56,
5: 0x49,
6: 0x53,
7: 0x49,
8: 0x4f,
9: 0x4e,
10: 0x2d,
11: 0x58,
12: 0x46,
13: 0x49,
14: 0x4c,
15: 0x45,
16: 0x2e
}
],
extractor: extractTARGA
}
],
"Video": [
{ // Place before webm
@@ -780,7 +808,7 @@ export const FILE_SIGNATURES = {
1: 0xfb
}
],
extractor: null
extractor: extractMP3
},
{
name: "MPEG-4 Part 14 audio",
@@ -3047,6 +3075,90 @@ export function extractICO(bytes, offset) {
}
/**
* TARGA extractor.
*
* @param {Uint8Array} bytes
* @param {number} offset
*/
export function extractTARGA(bytes, offset) {
// Need all the bytes since we do not know how far up the image goes.
const stream = new Stream(bytes);
stream.moveTo(offset - 8);
// Read in the offsets of the possible areas.
const extensionOffset = stream.readInt(4, "le");
const developerOffset = stream.readInt(4, "le");
stream.moveBackwardsBy(8);
/**
* Moves backwards in the stream until it reads a size value equal to the number of bytes moved.
*
* @param {number} maxSize
* @param {number} sizeOfSize
*/
function moveBackwardsUntilSize(maxSize, sizeOfSize) {
for (let i = 0; i < maxSize; i++) {
stream.moveBackwardsBy(1);
// Read in sizeOfSize amount of bytes in.
const size = stream.readInt(sizeOfSize, "le") - 1;
stream.moveBackwardsBy(sizeOfSize);
// If the size matches.
if (size === i)
break;
}
}
/**
* Moves backwards in the stream until the image size calculated from the header fields equals the number of bytes moved.
*/
function moveBackwardsUntilImageSize() {
stream.moveBackwardsBy(5);
// The documentation said that 0x100000 was the largest the file could be.
for (let i = 0; i < 0x100000; i++) {
// (Height * Width * pixel depth in bits)/8
const total = (stream.readInt(2, "le") * stream.readInt(2, "le") * stream.readInt(1))/8;
if (total === i-1)
break;
stream.moveBackwardsBy(6);
}
}
if (extensionOffset || developerOffset) {
if (extensionOffset) {
// Size is stored in two bytes hence the maximum is 0xffff.
moveBackwardsUntilSize(0xffff, 2);
// Move to where we think the start of the file is.
stream.moveBackwardsBy(extensionOffset);
} else if (developerOffset) {
// Size is stored in 4 bytes hence the maximum is 0xffffffff.
moveBackwardsUntilSize(0xffffffff, 4);
// Size is stored at byte position 6 so we have to move back.
stream.moveBackwardsBy(6);
// Move to where we think the start of the file is.
stream.moveBackwardsBy(developerOffset);
}
} else {
// Move backwards until size === number of bytes passed.
moveBackwardsUntilImageSize();
// Move backwards over the remainder of the header + the 5 we borrowed in moveBackwardsUntilImageSize().
stream.moveBackwardsBy(0xc+5);
}
return stream.carve(stream.position, offset+0x12);
}
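The signature matched above is the Targa v2 footer marker rather than a file header, which is why this extractor has to work backwards from the match. Decoded as ASCII it reads as follows (a quick check, not part of the diff):
// The signature bytes spell out the TGA v2 footer string.
const footer = [0x54, 0x52, 0x55, 0x45, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x2d, 0x58, 0x46, 0x49, 0x4c, 0x45, 0x2e];
console.log(String.fromCharCode(...footer)); // "TRUEVISION-XFILE."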
/**
* WAV extractor.
*
@@ -3067,6 +3179,79 @@ export function extractWAV(bytes, offset) {
}
/**
* MP3 extractor.
*
* @param {Uint8Array} bytes
* @param {Number} offset
* @returns {Uint8Array}
*/
export function extractMP3(bytes, offset) {
const stream = new Stream(bytes.slice(offset));
// Constants for flag byte.
const bitRateIndexes = ["free", 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 160000, 192000, 224000, 256000, 320000, "bad"];
const samplingRateFrequencyIndex = [44100, 48000, 32000, "reserved"];
// ID3 tag, move over it.
if ((stream.getBytes(3).toString() === [0x49, 0x44, 0x33].toString())) {
stream.moveTo(6);
const tagSize = (stream.readInt(1) << 21) | (stream.readInt(1) << 14) | (stream.readInt(1) << 7) | stream.readInt(1);
stream.moveForwardsBy(tagSize);
} else {
stream.moveTo(0);
}
// Loop over all the frame headers in the file.
while (stream.hasMore()) {
// If it has an old TAG frame at the end of it, fixed size, 128 bytes.
if (stream.getBytes(3) === [0x54, 0x41, 0x47].toString()) {
stream.moveForwardsBy(125);
break;
}
// If not start of frame.
if (stream.getBytes(2).toString() !== [0xff, 0xfb].toString()) {
stream.moveBackwardsBy(2);
break;
}
// Read flag byte.
const flags = stream.readInt(1);
// Extract frame bit rate from flag byte.
const bitRate = bitRateIndexes[flags >> 4];
// Extract frame sample rate from flag byte.
const sampleRate = samplingRateFrequencyIndex[(flags & 0x0f) >> 2];
// Padding if the frame size is not a multiple of the bitrate.
const padding = (flags & 0x02) >> 1;
// Things that are either not standard or undocumented.
if (bitRate === "free" || bitRate === "bad" || sampleRate === "reserved") {
stream.moveBackwardsBy(1);
break;
}
// Formula: FrameLength = (144 * BitRate / SampleRate ) + Padding
const frameSize = Math.floor(((144 * bitRate) / sampleRate) + padding);
// If the next move goes past the end of the bytestream then extract the entire bytestream.
// We assume complete frames in the above formula because there is no field that suggests otherwise.
if ((stream.position + frameSize) > stream.length) {
stream.moveTo(stream.length);
break;
} else {
stream.moveForwardsBy(frameSize - 3);
}
}
return stream.carve();
}
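As a worked example of the frame-length formula in the comment above (assumed bitrate and sample rate, not taken from the diff), a typical MPEG-1 Layer III frame at 128 kbps and 44.1 kHz with no padding comes out at 417 bytes:
// FrameLength = floor(144 * BitRate / SampleRate) + Padding
const bitRate = 128000, sampleRate = 44100, padding = 0;
console.log(Math.floor((144 * bitRate) / sampleRate) + padding); // 417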
/**
* FLV extractor.
*

View File

@@ -26,7 +26,7 @@ export function ipv4CidrRange(cidr, includeNetworkInfo, enumerateAddresses, allo
let output = "";
if (cidrRange < 0 || cidrRange > 31) {
return "IPv4 CIDR must be less than 32";
throw new OperationError("IPv4 CIDR must be less than 32");
}
const mask = ~(0xFFFFFFFF >>> cidrRange),
@@ -64,7 +64,7 @@ export function ipv6CidrRange(cidr, includeNetworkInfo) {
cidrRange = parseInt(cidr[cidr.length-1], 10);
if (cidrRange < 0 || cidrRange > 127) {
return "IPv6 CIDR must be less than 128";
throw new OperationError("IPv6 CIDR must be less than 128");
}
const ip1 = new Array(8),
@@ -211,7 +211,7 @@ export function ipv4ListedRange(match, includeNetworkInfo, enumerateAddresses, a
const network = strToIpv4(ipv4CidrList[i].split("/")[0]);
const cidrRange = parseInt(ipv4CidrList[i].split("/")[1], 10);
if (cidrRange < 0 || cidrRange > 31) {
return "IPv4 CIDR must be less than 32";
throw new OperationError("IPv4 CIDR must be less than 32");
}
const mask = ~(0xFFFFFFFF >>> cidrRange),
cidrIp1 = network & mask,
@@ -254,7 +254,7 @@ export function ipv6ListedRange(match, includeNetworkInfo) {
const cidrRange = parseInt(ipv6CidrList[i].split("/")[1], 10);
if (cidrRange < 0 || cidrRange > 127) {
return "IPv6 CIDR must be less than 128";
throw new OperationError("IPv6 CIDR must be less than 128");
}
const cidrIp1 = new Array(8),

View File

@@ -2,7 +2,7 @@ import OperationConfig from "../config/OperationConfig.json";
import Utils, { isWorkerEnvironment } from "../Utils.mjs";
import Recipe from "../Recipe.mjs";
import Dish from "../Dish.mjs";
import {detectFileType} from "./FileType.mjs";
import {detectFileType, isType} from "./FileType.mjs";
import chiSquared from "chi-squared";
/**
@@ -19,31 +19,38 @@ class Magic {
* Magic constructor.
*
* @param {ArrayBuffer} buf
* @param {Object[]} [opPatterns]
* @param {Object[]} [opCriteria]
* @param {Object} [prevOp]
*/
constructor(buf, opPatterns) {
constructor(buf, opCriteria=Magic._generateOpCriteria(), prevOp=null) {
this.inputBuffer = new Uint8Array(buf);
this.inputStr = Utils.arrayBufferToStr(buf);
this.opPatterns = opPatterns || Magic._generateOpPatterns();
this.opCriteria = opCriteria;
this.prevOp = prevOp;
}
/**
* Finds operations that claim to be able to decode the input based on regular
* expression matches.
* Finds operations that claim to be able to decode the input based on various criteria.
*
* @returns {Object[]}
*/
findMatchingOps() {
const matches = [];
findMatchingInputOps() {
const matches = [],
inputEntropy = this.calcEntropy();
for (let i = 0; i < this.opPatterns.length; i++) {
const pattern = this.opPatterns[i],
regex = new RegExp(pattern.match, pattern.flags);
this.opCriteria.forEach(check => {
// If the input doesn't lie in the required entropy range, move on
if (check.entropyRange &&
(inputEntropy < check.entropyRange[0] ||
inputEntropy > check.entropyRange[1]))
return;
// If the input doesn't match the pattern, move on
if (check.pattern &&
!check.pattern.test(this.inputStr))
return;
if (regex.test(this.inputStr)) {
matches.push(pattern);
}
}
matches.push(check);
});
return matches;
}
@@ -185,8 +192,10 @@ class Magic {
*
* @returns {number}
*/
calcEntropy() {
const prob = this._freqDist();
calcEntropy(data=this.inputBuffer, standalone=false) {
if (!standalone && this.inputEntropy) return this.inputEntropy;
const prob = this._freqDist(data, standalone);
let entropy = 0,
p;
@@ -195,6 +204,8 @@ class Magic {
if (p === 0) continue;
entropy += p * Math.log(p) / Math.log(2);
}
if (!standalone) this.inputEntropy = -entropy;
return -entropy;
}
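For reference, calcEntropy computes the standard Shannon entropy of the byte frequency distribution; this is the value compared against an operation's entropyRange (for example the [7.5, 8] range declared by Raw Inflate further down). A minimal standalone sketch of the same measure, not CyberChef code:
// Shannon entropy in bits per byte: 0 for constant data, approaching 8 for uniformly random bytes.
function shannonEntropy(bytes) {
    const counts = new Array(256).fill(0);
    for (const b of bytes) counts[b]++;
    let entropy = 0;
    for (const c of counts) {
        if (c === 0) continue;
        const p = c / bytes.length;
        entropy -= p * Math.log2(p);
    }
    return entropy;
}
console.log(shannonEntropy(new Uint8Array([0, 0, 0, 0]))); // 0
console.log(shannonEntropy(new Uint8Array([0, 1, 2, 3]))); // 2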
@@ -264,25 +275,59 @@ class Magic {
return results;
}
/**
* Checks whether the data passes output criteria for an operation check
*
* @param {ArrayBuffer} data
* @param {Object} criteria
* @returns {boolean}
*/
outputCheckPasses(data, criteria) {
if (criteria.pattern) {
const dataStr = Utils.arrayBufferToStr(data),
regex = new RegExp(criteria.pattern, criteria.flags);
if (!regex.test(dataStr))
return false;
}
if (criteria.entropyRange) {
const dataEntropy = this.calcEntropy(data, true);
if (dataEntropy < criteria.entropyRange[0] || dataEntropy > criteria.entropyRange[1])
return false;
}
if (criteria.mime &&
!isType(criteria.mime, data))
return false;
return true;
}
/**
* Speculatively executes matching operations, recording metadata of each result.
*
* @param {number} [depth=0] - How many levels to try to execute
* @param {boolean} [extLang=false] - Extensive language support (false = only check the most
* common Internet languages)
* common Internet languages)
* @param {boolean} [intensive=false] - Run brute-forcing on each branch (significantly affects
* performance)
* performance)
* @param {Object[]} [recipeConfig=[]] - The recipe configuration up to this point
* @param {boolean} [useful=false] - Whether the current recipe should be scored highly
* @param {string} [crib=null] - The regex crib provided by the user, for filtering the operation output
* @param {string} [crib=null] - The regex crib provided by the user, for filtering the operation
* output
* @returns {Object[]} - A sorted list of the recipes most likely to result in correct decoding
*/
async speculativeExecution(depth=0, extLang=false, intensive=false, recipeConfig=[], useful=false, crib=null) {
async speculativeExecution(
depth=0,
extLang=false,
intensive=false,
recipeConfig=[],
useful=false,
crib=null) {
// If we have reached the recursion depth, return
if (depth < 0) return [];
// Find any operations that can be run on this data
const matchingOps = this.findMatchingOps();
const matchingOps = this.findMatchingInputOps();
let results = [];
// Record the properties of the current data
@@ -308,17 +353,21 @@ class Magic {
},
output = await this._runRecipe([opConfig]);
// If the recipe is repeating and returning the same data, do not continue
if (prevOp && op.op === prevOp.op && _buffersEqual(output, this.inputBuffer)) {
return;
}
// If the recipe returned an empty buffer, do not continue
if (_buffersEqual(output, new ArrayBuffer())) {
return;
}
const magic = new Magic(output, this.opPatterns),
// If the recipe is repeating and returning the same data, do not continue
if (prevOp && op.op === prevOp.op && _buffersEqual(output, this.inputBuffer)) {
return;
}
// If the output criteria for this op doesn't match the output, do not continue
if (op.output && !this.outputCheckPasses(output, op.output))
return;
const magic = new Magic(output, this.opCriteria, OperationConfig[op.op]),
speculativeResults = await magic.speculativeExecution(
depth-1, extLang, intensive, [...recipeConfig, opConfig], op.useful, crib);
@@ -330,7 +379,7 @@ class Magic {
const bfEncodings = await this.bruteForce();
await Promise.all(bfEncodings.map(async enc => {
const magic = new Magic(enc.data, this.opPatterns),
const magic = new Magic(enc.data, this.opCriteria, undefined),
bfResults = await magic.speculativeExecution(
depth-1, extLang, false, [...recipeConfig, enc.conf], false, crib);
@@ -345,7 +394,8 @@ class Magic {
r.languageScores[0].probability > 0 || // Some kind of language was found
r.fileType || // A file was found
r.isUTF8 || // UTF-8 was found
r.matchingOps.length // A matching op was found
r.matchingOps.length || // A matching op was found
r.matchesCrib // The crib matches
)
);
@@ -376,9 +426,10 @@ class Magic {
bScore += b.entropy;
// A result with no recipe but matching ops suggests there are better options
if ((!a.recipe.length && a.matchingOps.length) &&
b.recipe.length)
if ((!a.recipe.length && a.matchingOps.length) && b.recipe.length)
return 1;
if ((!b.recipe.length && b.matchingOps.length) && a.recipe.length)
return -1;
return aScore - bScore;
});
@@ -417,14 +468,16 @@ class Magic {
* Calculates the number of times each byte appears in the input as a percentage
*
* @private
* @param {ArrayBuffer} [data]
* @param {boolean} [standalone]
* @returns {number[]}
*/
_freqDist() {
if (this.freqDist) return this.freqDist;
_freqDist(data=this.inputBuffer, standalone=false) {
if (!standalone && this.freqDist) return this.freqDist;
const len = this.inputBuffer.length;
const len = data.length,
counts = new Array(256).fill(0);
let i = len;
const counts = new Array(256).fill(0);
if (!len) {
this.freqDist = counts;
@@ -432,13 +485,15 @@ class Magic {
}
while (i--) {
counts[this.inputBuffer[i]]++;
counts[data[i]]++;
}
this.freqDist = counts.map(c => {
const result = counts.map(c => {
return c / len * 100;
});
return this.freqDist;
if (!standalone) this.freqDist = result;
return result;
}
/**
@@ -447,24 +502,29 @@ class Magic {
* @private
* @returns {Object[]}
*/
static _generateOpPatterns() {
const opPatterns = [];
static _generateOpCriteria() {
const opCriteria = [];
for (const op in OperationConfig) {
if (!("patterns" in OperationConfig[op])) continue;
if (!("checks" in OperationConfig[op]))
continue;
OperationConfig[op].patterns.forEach(pattern => {
opPatterns.push({
OperationConfig[op].checks.forEach(check => {
// Add to the opCriteria list.
// Compile the regex here and cache the compiled version so we
// don't have to keep calculating it.
opCriteria.push({
op: op,
match: pattern.match,
flags: pattern.flags,
args: pattern.args,
useful: pattern.useful || false
pattern: check.pattern ? new RegExp(check.pattern, check.flags) : null,
args: check.args,
useful: check.useful,
entropyRange: check.entropyRange,
output: check.output
});
});
}
return opPatterns;
return opCriteria;
}
/**

View File

@@ -303,11 +303,13 @@ export default class Stream {
/**
* Returns a slice of the stream up to the current position.
*
* @param {number} [start=0]
* @param {number} [finish=this.position]
* @returns {Uint8Array}
*/
carve() {
if (this.bitPos > 0) this.position++;
return this.bytes.slice(0, this.position);
carve(start=0, finish=this.position) {
if (this.bitPos > 0) finish++;
return this.bytes.slice(start, finish);
}
}

View File

@@ -33,6 +33,38 @@ class A1Z26CipherDecode extends Operation {
value: DELIM_OPTIONS
}
];
this.checks = [
{
pattern: "^\\s*([12]?[0-9] )+[12]?[0-9]\\s*$",
flags: "",
args: ["Space"]
},
{
pattern: "^\\s*([12]?[0-9],)+[12]?[0-9]\\s*$",
flags: "",
args: ["Comma"]
},
{
pattern: "^\\s*([12]?[0-9];)+[12]?[0-9]\\s*$",
flags: "",
args: ["Semi-colon"]
},
{
pattern: "^\\s*([12]?[0-9]:)+[12]?[0-9]\\s*$",
flags: "",
args: ["Colon"]
},
{
pattern: "^\\s*([12]?[0-9]\\n)+[12]?[0-9]\\s*$",
flags: "",
args: ["Line feed"]
},
{
pattern: "^\\s*([12]?[0-9]\\r\\n)+[12]?[0-9]\\s*$",
flags: "",
args: ["CRLF"]
}
];
}
/**

View File

@@ -44,6 +44,48 @@ class BaconCipherDecode extends Operation {
"value": false
}
];
this.checks = [
{
pattern: "^\\s*([01]{5}\\s?)+$",
flags: "",
args: ["Standard (I=J and U=V)", "0/1", false]
},
{
pattern: "^\\s*([01]{5}\\s?)+$",
flags: "",
args: ["Standard (I=J and U=V)", "0/1", true]
},
{
pattern: "^\\s*([AB]{5}\\s?)+$",
flags: "",
args: ["Standard (I=J and U=V)", "A/B", false]
},
{
pattern: "^\\s*([AB]{5}\\s?)+$",
flags: "",
args: ["Standard (I=J and U=V)", "A/B", true]
},
{
pattern: "^\\s*([01]{5}\\s?)+$",
flags: "",
args: ["Complete", "0/1", false]
},
{
pattern: "^\\s*([01]{5}\\s?)+$",
flags: "",
args: ["Complete", "0/1", true]
},
{
pattern: "^\\s*([AB]{5}\\s?)+$",
flags: "",
args: ["Complete", "A/B", false]
},
{
pattern: "^\\s*([AB]{5}\\s?)+$",
flags: "",
args: ["Complete", "A/B", true]
}
];
}
/**

View File

@@ -33,9 +33,9 @@ class Bzip2Decompress extends Operation {
value: false
}
];
this.patterns = [
this.checks = [
{
"match": "^\\x42\\x5a\\x68",
"pattern": "^\\x42\\x5a\\x68",
"flags": "",
"args": []
}

View File

@@ -24,6 +24,13 @@ class DechunkHTTPResponse extends Operation {
this.inputType = "string";
this.outputType = "string";
this.args = [];
this.checks = [
{
pattern: "^[0-9A-F]+\r\n",
flags: "i",
args: []
}
];
}
/**

View File

@@ -30,6 +30,13 @@ class DecodeNetBIOSName extends Operation {
"value": 65
}
];
this.checks = [
{
pattern: "^\\s*\\S{32}$",
flags: "",
args: [65]
}
];
}
/**

View File

@@ -25,7 +25,17 @@ class DefangIPAddresses extends Operation {
this.inputType = "string";
this.outputType = "string";
this.args = [];
this.checks = [
{
pattern: "^\\s*(([0-9]{1,3}\\.){3}[0-9]{1,3}|([0-9a-f]{4}:){7}[0-9a-f]{4})\\s*$",
flags: "i",
args: [],
output: {
pattern: "^\\s*(([0-9]{1,3}\\[\\.\\]){3}[0-9]{1,3}|([0-9a-f]{4}\\[\\:\\]){7}[0-9a-f]{4})\\s*$",
flags: "i"
}
}
];
}
/**

View File

@@ -44,22 +44,22 @@ class EscapeUnicodeCharacters extends Operation {
"value": true
}
];
this.patterns = [
this.checks = [
{
match: "\\\\u(?:[\\da-f]{4,6})",
pattern: "\\\\u(?:[\\da-f]{4,6})",
flags: "i",
args: ["\\u"]
},
{
match: "%u(?:[\\da-f]{4,6})",
pattern: "%u(?:[\\da-f]{4,6})",
flags: "i",
args: ["%u"]
},
{
match: "U\\+(?:[\\da-f]{4,6})",
pattern: "U\\+(?:[\\da-f]{4,6})",
flags: "i",
args: ["U+"]
},
}
];
}

View File

@@ -49,9 +49,9 @@ class FromBCD extends Operation {
"value": FORMAT
}
];
this.patterns = [
this.checks = [
{
match: "^(?:\\d{4} ){3,}\\d{4}$",
pattern: "^(?:\\d{4} ){3,}\\d{4}$",
flags: "",
args: ["8 4 2 1", true, false, "Nibbles"]
},

View File

@@ -36,12 +36,12 @@ class FromBase32 extends Operation {
value: true
}
];
this.patterns = [
this.checks = [
{
match: "^(?:[A-Z2-7]{8})+(?:[A-Z2-7]{2}={6}|[A-Z2-7]{4}={4}|[A-Z2-7]{5}={3}|[A-Z2-7]{7}={1})?$",
pattern: "^(?:[A-Z2-7]{8})+(?:[A-Z2-7]{2}={6}|[A-Z2-7]{4}={4}|[A-Z2-7]{5}={3}|[A-Z2-7]{7}={1})?$",
flags: "",
args: ["A-Z2-7=", false]
},
}
];
}

View File

@@ -38,14 +38,14 @@ class FromBase58 extends Operation {
"value": true
}
];
this.patterns = [
this.checks = [
{
match: "^[1-9A-HJ-NP-Za-km-z]{20,}$",
pattern: "^[1-9A-HJ-NP-Za-km-z]{20,}$",
flags: "",
args: ["123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz", false]
},
{
match: "^[1-9A-HJ-NP-Za-km-z]{20,}$",
pattern: "^[1-9A-HJ-NP-Za-km-z]{20,}$",
flags: "",
args: ["rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz", false]
},

View File

@@ -36,69 +36,69 @@ class FromBase64 extends Operation {
value: true
}
];
this.patterns = [
this.checks = [
{
match: "^\\s*(?:[A-Z\\d+/]{4})+(?:[A-Z\\d+/]{2}==|[A-Z\\d+/]{3}=)?\\s*$",
pattern: "^\\s*(?:[A-Z\\d+/]{4})+(?:[A-Z\\d+/]{2}==|[A-Z\\d+/]{3}=)?\\s*$",
flags: "i",
args: ["A-Za-z0-9+/=", true]
},
{
match: "^\\s*[A-Z\\d\\-_]{20,}\\s*$",
pattern: "^\\s*[A-Z\\d\\-_]{20,}\\s*$",
flags: "i",
args: ["A-Za-z0-9-_", true]
},
{
match: "^\\s*(?:[A-Z\\d+\\-]{4}){5,}(?:[A-Z\\d+\\-]{2}==|[A-Z\\d+\\-]{3}=)?\\s*$",
pattern: "^\\s*(?:[A-Z\\d+\\-]{4}){5,}(?:[A-Z\\d+\\-]{2}==|[A-Z\\d+\\-]{3}=)?\\s*$",
flags: "i",
args: ["A-Za-z0-9+\\-=", true]
},
{
match: "^\\s*(?:[A-Z\\d./]{4}){5,}(?:[A-Z\\d./]{2}==|[A-Z\\d./]{3}=)?\\s*$",
pattern: "^\\s*(?:[A-Z\\d./]{4}){5,}(?:[A-Z\\d./]{2}==|[A-Z\\d./]{3}=)?\\s*$",
flags: "i",
args: ["./0-9A-Za-z=", true]
},
{
match: "^\\s*[A-Z\\d_.]{20,}\\s*$",
pattern: "^\\s*[A-Z\\d_.]{20,}\\s*$",
flags: "i",
args: ["A-Za-z0-9_.", true]
},
{
match: "^\\s*(?:[A-Z\\d._]{4}){5,}(?:[A-Z\\d._]{2}--|[A-Z\\d._]{3}-)?\\s*$",
pattern: "^\\s*(?:[A-Z\\d._]{4}){5,}(?:[A-Z\\d._]{2}--|[A-Z\\d._]{3}-)?\\s*$",
flags: "i",
args: ["A-Za-z0-9._-", true]
},
{
match: "^\\s*(?:[A-Z\\d+/]{4}){5,}(?:[A-Z\\d+/]{2}==|[A-Z\\d+/]{3}=)?\\s*$",
pattern: "^\\s*(?:[A-Z\\d+/]{4}){5,}(?:[A-Z\\d+/]{2}==|[A-Z\\d+/]{3}=)?\\s*$",
flags: "i",
args: ["0-9a-zA-Z+/=", true]
},
{
match: "^\\s*(?:[A-Z\\d+/]{4}){5,}(?:[A-Z\\d+/]{2}==|[A-Z\\d+/]{3}=)?\\s*$",
pattern: "^\\s*(?:[A-Z\\d+/]{4}){5,}(?:[A-Z\\d+/]{2}==|[A-Z\\d+/]{3}=)?\\s*$",
flags: "i",
args: ["0-9A-Za-z+/=", true]
},
{
match: "^[ !\"#$%&'()*+,\\-./\\d:;<=>?@A-Z[\\\\\\]^_]{20,}$",
pattern: "^[ !\"#$%&'()*+,\\-./\\d:;<=>?@A-Z[\\\\\\]^_]{20,}$",
flags: "",
args: [" -_", false]
},
{
match: "^\\s*[A-Z\\d+\\-]{20,}\\s*$",
pattern: "^\\s*[A-Z\\d+\\-]{20,}\\s*$",
flags: "i",
args: ["+\\-0-9A-Za-z", true]
},
{
match: "^\\s*[!\"#$%&'()*+,\\-0-689@A-NP-VX-Z[`a-fh-mp-r]{20,}\\s*$",
pattern: "^\\s*[!\"#$%&'()*+,\\-0-689@A-NP-VX-Z[`a-fh-mp-r]{20,}\\s*$",
flags: "",
args: ["!-,-0-689@A-NP-VX-Z[`a-fh-mp-r", true]
},
{
match: "^\\s*(?:[N-ZA-M\\d+/]{4}){5,}(?:[N-ZA-M\\d+/]{2}==|[N-ZA-M\\d+/]{3}=)?\\s*$",
pattern: "^\\s*(?:[N-ZA-M\\d+/]{4}){5,}(?:[N-ZA-M\\d+/]{2}==|[N-ZA-M\\d+/]{3}=)?\\s*$",
flags: "i",
args: ["N-ZA-Mn-za-m0-9+/=", true]
},
{
match: "^\\s*[A-Z\\d./]{20,}\\s*$",
pattern: "^\\s*[A-Z\\d./]{20,}\\s*$",
flags: "i",
args: ["./0-9A-Za-z", true]
},

View File

@@ -33,39 +33,39 @@ class FromBinary extends Operation {
"value": BIN_DELIM_OPTIONS
}
];
this.patterns = [
this.checks = [
{
match: "^(?:[01]{8})+$",
pattern: "^(?:[01]{8})+$",
flags: "",
args: ["None"]
},
{
match: "^(?:[01]{8})(?: [01]{8})*$",
pattern: "^(?:[01]{8})(?: [01]{8})*$",
flags: "",
args: ["Space"]
},
{
match: "^(?:[01]{8})(?:,[01]{8})*$",
pattern: "^(?:[01]{8})(?:,[01]{8})*$",
flags: "",
args: ["Comma"]
},
{
match: "^(?:[01]{8})(?:;[01]{8})*$",
pattern: "^(?:[01]{8})(?:;[01]{8})*$",
flags: "",
args: ["Semi-colon"]
},
{
match: "^(?:[01]{8})(?::[01]{8})*$",
pattern: "^(?:[01]{8})(?::[01]{8})*$",
flags: "",
args: ["Colon"]
},
{
match: "^(?:[01]{8})(?:\\n[01]{8})*$",
pattern: "^(?:[01]{8})(?:\\n[01]{8})*$",
flags: "",
args: ["Line feed"]
},
{
match: "^(?:[01]{8})(?:\\r\\n[01]{8})*$",
pattern: "^(?:[01]{8})(?:\\r\\n[01]{8})*$",
flags: "",
args: ["CRLF"]
},

View File

@@ -36,37 +36,37 @@ class FromDecimal extends Operation {
"value": false
}
];
this.patterns = [
this.checks = [
{
match: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?: (?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
pattern: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?: (?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
flags: "",
args: ["Space", false]
},
{
match: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:,(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
pattern: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:,(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
flags: "",
args: ["Comma", false]
},
{
match: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:;(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
pattern: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:;(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
flags: "",
args: ["Semi-colon", false]
},
{
match: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?::(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
pattern: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?::(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
flags: "",
args: ["Colon", false]
},
{
match: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:\\n(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
pattern: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:\\n(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
flags: "",
args: ["Line feed", false]
},
{
match: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:\\r\\n(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
pattern: "^(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5])(?:\\r\\n(?:\\d{1,2}|1\\d{2}|2[0-4]\\d|25[0-5]))*$",
flags: "",
args: ["CRLF", false]
},
}
];
}

View File

@@ -25,12 +25,12 @@ class FromHTMLEntity extends Operation {
this.inputType = "string";
this.outputType = "string";
this.args = [];
this.patterns = [
this.checks = [
{
match: "&(?:#\\d{2,3}|#x[\\da-f]{2}|[a-z]{2,6});",
pattern: "&(?:#\\d{2,3}|#x[\\da-f]{2}|[a-z]{2,6});",
flags: "i",
args: []
},
}
];
}

View File

@@ -32,49 +32,54 @@ class FromHex extends Operation {
value: FROM_HEX_DELIM_OPTIONS
}
];
this.patterns = [
this.checks = [
{
match: "^(?:[\\dA-F]{2})+$",
pattern: "^(?:[\\dA-F]{2})+$",
flags: "i",
args: ["None"]
},
{
match: "^[\\dA-F]{2}(?: [\\dA-F]{2})*$",
pattern: "^[\\dA-F]{2}(?: [\\dA-F]{2})*$",
flags: "i",
args: ["Space"]
},
{
match: "^[\\dA-F]{2}(?:,[\\dA-F]{2})*$",
pattern: "^[\\dA-F]{2}(?:,[\\dA-F]{2})*$",
flags: "i",
args: ["Comma"]
},
{
match: "^[\\dA-F]{2}(?:;[\\dA-F]{2})*$",
pattern: "^[\\dA-F]{2}(?:;[\\dA-F]{2})*$",
flags: "i",
args: ["Semi-colon"]
},
{
match: "^[\\dA-F]{2}(?::[\\dA-F]{2})*$",
pattern: "^[\\dA-F]{2}(?::[\\dA-F]{2})*$",
flags: "i",
args: ["Colon"]
},
{
match: "^[\\dA-F]{2}(?:\\n[\\dA-F]{2})*$",
pattern: "^[\\dA-F]{2}(?:\\n[\\dA-F]{2})*$",
flags: "i",
args: ["Line feed"]
},
{
match: "^[\\dA-F]{2}(?:\\r\\n[\\dA-F]{2})*$",
pattern: "^[\\dA-F]{2}(?:\\r\\n[\\dA-F]{2})*$",
flags: "i",
args: ["CRLF"]
},
{
match: "^[\\dA-F]{2}(?:0x[\\dA-F]{2})*$",
pattern: "^(?:0x[\\dA-F]{2})+$",
flags: "i",
args: ["0x"]
},
{
match: "^[\\dA-F]{2}(?:\\\\x[\\dA-F]{2})*$",
pattern: "^0x[\\dA-F]{2}(?:,0x[\\dA-F]{2})*$",
flags: "i",
args: ["0x with comma"]
},
{
pattern: "^(?:\\\\x[\\dA-F]{2})+$",
flags: "i",
args: ["\\x"]
}
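As a quick illustration (not part of the diff) of how the new "0x with comma" option behaves, the ",?0x" delimiter regex added to Utils earlier in this diff strips both the optional comma and the 0x prefix in a single pass:
// Hypothetical snippet showing the "0x with comma" delimiter in action.
"0x74,0x65,0x73,0x74".replace(/,?0x/g, "");
// "74657374", which From Hex then decodes to "test"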

View File

@@ -26,6 +26,13 @@ class FromHexContent extends Operation {
this.inputType = "string";
this.outputType = "byteArray";
this.args = [];
this.checks = [
{
pattern: "\\|([\\da-f]{2} ?)+\\|",
flags: "i",
args: []
}
];
}
/**

View File

@@ -27,9 +27,9 @@ class FromHexdump extends Operation {
this.inputType = "string";
this.outputType = "byteArray";
this.args = [];
this.patterns = [
this.checks = [
{
match: "^(?:(?:[\\dA-F]{4,16}h?:?)?[ \\t]*((?:[\\dA-F]{2} ){1,8}(?:[ \\t]|[\\dA-F]{2}-)(?:[\\dA-F]{2} ){1,8}|(?:[\\dA-F]{4} )*[\\dA-F]{4}|(?:[\\dA-F]{2} )*[\\dA-F]{2})[^\\n]*\\n?){2,}$",
pattern: "^(?:(?:[\\dA-F]{4,16}h?:?)?[ \\t]*((?:[\\dA-F]{2} ){1,8}(?:[ \\t]|[\\dA-F]{2}-)(?:[\\dA-F]{2} ){1,8}|(?:[\\dA-F]{4} )*[\\dA-F]{4}|(?:[\\dA-F]{2} )*[\\dA-F]{2})[^\\n]*\\n?){2,}$",
flags: "i",
args: []
},

View File

@@ -37,12 +37,12 @@ class FromMorseCode extends Operation {
"value": WORD_DELIM_OPTIONS
}
];
this.patterns = [
this.checks = [
{
match: "(?:^[-. \\n]{5,}$|^[_. \\n]{5,}$|^(?:dash|dot| |\\n){5,}$)",
pattern: "(?:^[-. \\n]{5,}$|^[_. \\n]{5,}$|^(?:dash|dot| |\\n){5,}$)",
flags: "i",
args: ["Space", "Line feed"]
},
}
];
}

View File

@@ -32,37 +32,37 @@ class FromOctal extends Operation {
"value": DELIM_OPTIONS
}
];
this.patterns = [
this.checks = [
{
match: "^(?:[0-7]{1,2}|[123][0-7]{2})(?: (?:[0-7]{1,2}|[123][0-7]{2}))*$",
pattern: "^(?:[0-7]{1,2}|[123][0-7]{2})(?: (?:[0-7]{1,2}|[123][0-7]{2}))*$",
flags: "",
args: ["Space"]
},
{
match: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:,(?:[0-7]{1,2}|[123][0-7]{2}))*$",
pattern: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:,(?:[0-7]{1,2}|[123][0-7]{2}))*$",
flags: "",
args: ["Comma"]
},
{
match: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:;(?:[0-7]{1,2}|[123][0-7]{2}))*$",
pattern: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:;(?:[0-7]{1,2}|[123][0-7]{2}))*$",
flags: "",
args: ["Semi-colon"]
},
{
match: "^(?:[0-7]{1,2}|[123][0-7]{2})(?::(?:[0-7]{1,2}|[123][0-7]{2}))*$",
pattern: "^(?:[0-7]{1,2}|[123][0-7]{2})(?::(?:[0-7]{1,2}|[123][0-7]{2}))*$",
flags: "",
args: ["Colon"]
},
{
match: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:\\n(?:[0-7]{1,2}|[123][0-7]{2}))*$",
pattern: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:\\n(?:[0-7]{1,2}|[123][0-7]{2}))*$",
flags: "",
args: ["Line feed"]
},
{
match: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:\\r\\n(?:[0-7]{1,2}|[123][0-7]{2}))*$",
pattern: "^(?:[0-7]{1,2}|[123][0-7]{2})(?:\\r\\n(?:[0-7]{1,2}|[123][0-7]{2}))*$",
flags: "",
args: ["CRLF"]
},
}
];
}

View File

@@ -28,9 +28,9 @@ class FromQuotedPrintable extends Operation {
this.inputType = "string";
this.outputType = "byteArray";
this.args = [];
this.patterns = [
this.checks = [
{
match: "^[\\x21-\\x3d\\x3f-\\x7e \\t]{0,76}(?:=[\\da-f]{2}|=\\r?\\n)(?:[\\x21-\\x3d\\x3f-\\x7e \\t]|=[\\da-f]{2}|=\\r?\\n)*$",
pattern: "^[\\x21-\\x3d\\x3f-\\x7e \\t]{0,76}(?:=[\\da-f]{2}|=\\r?\\n)(?:[\\x21-\\x3d\\x3f-\\x7e \\t]|=[\\da-f]{2}|=\\r?\\n)*$",
flags: "i",
args: []
},

View File

@@ -33,27 +33,27 @@ class FromUNIXTimestamp extends Operation {
"value": UNITS
}
];
this.patterns = [
this.checks = [
{
match: "^1?\\d{9}$",
pattern: "^1?\\d{9}$",
flags: "",
args: ["Seconds (s)"]
},
{
match: "^1?\\d{12}$",
pattern: "^1?\\d{12}$",
flags: "",
args: ["Milliseconds (ms)"]
},
{
match: "^1?\\d{15}$",
pattern: "^1?\\d{15}$",
flags: "",
args: ["Microseconds (μs)"]
},
{
match: "^1?\\d{18}$",
pattern: "^1?\\d{18}$",
flags: "",
args: ["Nanoseconds (ns)"]
},
}
];
}

View File

@@ -27,12 +27,12 @@ class Gunzip extends Operation {
this.inputType = "ArrayBuffer";
this.outputType = "ArrayBuffer";
this.args = [];
this.patterns = [
this.checks = [
{
match: "^\\x1f\\x8b\\x08",
pattern: "^\\x1f\\x8b\\x08",
flags: "",
args: []
},
}
];
}

View File

@@ -4,10 +4,9 @@
* @license Apache-2.0
*/
import OperationError from "../errors/OperationError.mjs";
import Operation from "../Operation.mjs";
import * as esprima from "esprima";
import escodegen from "escodegen";
import esmangle from "esmangle";
import Terser from "terser";
/**
* JavaScript Minify operation
@@ -34,22 +33,11 @@ class JavaScriptMinify extends Operation {
* @returns {string}
*/
run(input, args) {
let result = "";
const AST = esprima.parseScript(input),
optimisedAST = esmangle.optimize(AST, null),
mangledAST = esmangle.mangle(optimisedAST);
result = escodegen.generate(mangledAST, {
format: {
renumber: true,
hexadecimal: true,
escapeless: true,
compact: true,
semicolons: false,
parentheses: false
}
});
return result;
const result = Terser.minify(input);
if (result.error) {
throw new OperationError(`Error minifying JavaScript. (${result.error})`);
}
return result.code;
}
}
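For reference, Terser's minify API (synchronous in the 4.x release pulled in by package.json above) returns an object carrying either code or error, which is what the new run() checks. A minimal usage sketch, with an arbitrary example input:
import Terser from "terser";
const result = Terser.minify("function add(first, second) { return first + second; }");
if (result.error) throw result.error;
console.log(result.code); // something like "function add(n,r){return n+r}" (mangled names may vary)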

View File

@@ -0,0 +1,47 @@
/**
* @author dmfj [dominic@dmfj.io]
* @copyright Crown Copyright 2020
* @license Apache-2.0
*/
import Operation from "../Operation.mjs";
import OperationError from "../errors/OperationError.mjs";
import BSON from "bson";
/**
* Parse ObjectID timestamp operation
*/
class ParseObjectIDTimestamp extends Operation {
/**
* ParseObjectIDTimestamp constructor
*/
constructor() {
super();
this.name = "Parse ObjectID timestamp";
this.module = "Serialise";
this.description = "Parse timestamp from MongoDB/BSON ObjectID hex string.";
this.infoURL = "https://docs.mongodb.com/manual/reference/method/ObjectId.getTimestamp/";
this.inputType = "string";
this.outputType = "string";
this.args = [];
}
/**
* @param {string} input
* @param {Object[]} args
* @returns {string}
*/
run(input, args) {
try {
const objectId = new BSON.ObjectID(input);
return objectId.getTimestamp().toISOString();
} catch (err) {
throw new OperationError(err);
}
}
}
export default ParseObjectIDTimestamp;
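For context, an ObjectID embeds its creation time in its first 4 bytes as big-endian seconds since the Unix epoch, which is all getTimestamp() reads. A rough equivalent without the BSON dependency (an illustrative sketch, not the operation's code):
const objectId = "000000000000000000000000"; // the test vector used further down
const seconds = parseInt(objectId.substring(0, 8), 16);
console.log(new Date(seconds * 1000).toISOString()); // "1970-01-01T00:00:00.000Z"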

View File

@@ -33,9 +33,9 @@ class ParseQRCode extends Operation {
"value": false
}
];
this.patterns = [
this.checks = [
{
"match": "^(?:\\xff\\xd8\\xff|\\x89\\x50\\x4e\\x47|\\x47\\x49\\x46|.{8}\\x57\\x45\\x42\\x50|\\x42\\x4d)",
"pattern": "^(?:\\xff\\xd8\\xff|\\x89\\x50\\x4e\\x47|\\x47\\x49\\x46|.{8}\\x57\\x45\\x42\\x50|\\x42\\x4d)",
"flags": "",
"args": [false],
"useful": true

View File

@@ -38,6 +38,13 @@ class ParseSSHHostKey extends Operation {
]
}
];
this.checks = [
{
pattern: "^\\s*([A-F\\d]{2}[,;:]){15,}[A-F\\d]{2}\\s*$",
flags: "i",
args: ["Hex"]
}
];
}
/**

View File

@@ -25,6 +25,13 @@ class ParseUNIXFilePermissions extends Operation {
this.inputType = "string";
this.outputType = "string";
this.args = [];
this.checks = [
{
pattern: "^\\s*d[rxw-]{9}\\s*$",
flags: "",
args: []
}
];
}
/**

View File

@@ -25,6 +25,13 @@ class ParseUserAgent extends Operation {
this.inputType = "string";
this.outputType = "string";
this.args = [];
this.checks = [
{
pattern: "^(User-Agent:|Mozilla\\/)[^\\n\\r]+\\s*$",
flags: "i",
args: []
}
];
}
/**

View File

@@ -35,13 +35,11 @@ class ParseX509Certificate extends Operation {
"value": ["PEM", "DER Hex", "Base64", "Raw"]
}
];
this.patterns = [
this.checks = [
{
"match": "^-+BEGIN CERTIFICATE-+\\r?\\n[\\da-z+/\\n\\r]+-+END CERTIFICATE-+\\r?\\n?$",
"pattern": "^-+BEGIN CERTIFICATE-+\\r?\\n[\\da-z+/\\n\\r]+-+END CERTIFICATE-+\\r?\\n?$",
"flags": "i",
"args": [
"PEM"
]
"args": ["PEM"]
}
];
}

View File

@@ -60,6 +60,12 @@ class RawInflate extends Operation {
value: false
}
];
this.checks = [
{
entropyRange: [7.5, 8],
args: [0, 0, INFLATE_BUFFER_TYPE, false, false]
}
];
}
/**

View File

@@ -163,7 +163,7 @@ class RegularExpression extends Operation {
case "List matches with capture groups":
return Utils.escapeHtml(regexList(input, regex, displayTotal, true, true));
default:
return "Error: Invalid output format";
throw new OperationError("Error: Invalid output format");
}
} catch (err) {
throw new OperationError("Invalid regex. Details: " + err.message);

View File

@@ -35,12 +35,15 @@ class RenderImage extends Operation {
"value": ["Raw", "Base64", "Hex"]
}
];
this.patterns = [
this.checks = [
{
"match": "^(?:\\xff\\xd8\\xff|\\x89\\x50\\x4e\\x47|\\x47\\x49\\x46|.{8}\\x57\\x45\\x42\\x50|\\x42\\x4d)",
"flags": "",
"args": ["Raw"],
"useful": true
pattern: "^(?:\\xff\\xd8\\xff|\\x89\\x50\\x4e\\x47|\\x47\\x49\\x46|.{8}\\x57\\x45\\x42\\x50|\\x42\\x4d)",
flags: "",
args: ["Raw"],
useful: true,
output: {
mime: "image"
}
}
];
}

View File

@@ -35,6 +35,13 @@ class StripHTMLTags extends Operation {
"value": true
}
];
this.checks = [
{
pattern: "(</html>|</div>|</body>)",
flags: "i",
args: [true, true]
}
];
}
/**

View File

@@ -24,6 +24,13 @@ class StripHTTPHeaders extends Operation {
this.inputType = "string";
this.outputType = "string";
this.args = [];
this.checks = [
{
pattern: "^HTTP(.|\\s)+?(\\r?\\n){2}",
flags: "",
args: []
}
];
}
/**

View File

@@ -24,9 +24,9 @@ class URLDecode extends Operation {
this.inputType = "string";
this.outputType = "string";
this.args = [];
this.patterns = [
this.checks = [
{
match: ".*(?:%[\\da-f]{2}.*){4}",
pattern: ".*(?:%[\\da-f]{2}.*){4}",
flags: "i",
args: []
},

View File

@@ -27,9 +27,9 @@ class Untar extends Operation {
this.outputType = "List<File>";
this.presentType = "html";
this.args = [];
this.patterns = [
this.checks = [
{
"match": "^.{257}\\x75\\x73\\x74\\x61\\x72",
"pattern": "^.{257}\\x75\\x73\\x74\\x61\\x72",
"flags": "",
"args": []
}

View File

@@ -40,12 +40,12 @@ class Unzip extends Operation {
value: false
}
];
this.patterns = [
this.checks = [
{
match: "^\\x50\\x4b(?:\\x03|\\x05|\\x07)(?:\\x04|\\x06|\\x08)",
pattern: "^\\x50\\x4b(?:\\x03|\\x05|\\x07)(?:\\x04|\\x06|\\x08)",
flags: "",
args: ["", false]
},
}
];
}

View File

@@ -59,9 +59,9 @@ class ZlibInflate extends Operation {
value: false
}
];
this.patterns = [
this.checks = [
{
match: "^\\x78(\\x01|\\x9c|\\xda|\\x5e)",
pattern: "^\\x78(\\x01|\\x9c|\\xda|\\x5e)",
flags: "",
args: [0, 0, "Adaptive", false, false]
},

View File

@@ -1,4 +1,4 @@
import Sitemap from "sitemap";
import sm from "sitemap";
import OperationConfig from "../../core/config/OperationConfig.json";
@@ -10,24 +10,25 @@ import OperationConfig from "../../core/config/OperationConfig.json";
* @license Apache-2.0
*/
const sitemap = Sitemap.createSitemap({
const smStream = new sm.SitemapStream({
hostname: "https://gchq.github.io/CyberChef",
});
sitemap.add({
smStream.write({
url: "/",
changefreq: "weekly",
priority: 1.0
});
for (const op in OperationConfig) {
sitemap.add({
smStream.write({
url: `/?op=${encodeURIComponent(op)}`,
changeFreq: "yearly",
priority: 0.5
});
}
smStream.end();
const xml = sitemap.toString();
console.log(xml); // eslint-disable-line no-console
sm.streamToPromise(smStream).then(
buffer => console.log(buffer.toString()) // eslint-disable-line no-console
);

View File

@@ -51,7 +51,6 @@ class RecipeWaiter {
}
}.bind(this),
onSort: function(evt) {
this.updateZIndices();
if (evt.from.id === "rec-list") {
document.dispatchEvent(this.manager.statechange);
}
@@ -150,19 +149,6 @@ class RecipeWaiter {
}
/**
* Sets the z-index property on each operation to make sure that operations higher in the list
* have a higher index, meaning dropdowns are not hidden underneath subsequent operations.
*/
updateZIndices() {
const operations = document.getElementById("rec-list").children;
for (let i = 0; i < operations.length; i++) {
const operation = operations[i];
operation.style.zIndex = 100 + operations.length - i;
}
}
/**
* Handler for favourite dragover events.
* If the element being dragged is an operation, displays a visual cue so that the user knows it can
@@ -480,7 +466,6 @@ class RecipeWaiter {
log.debug(`'${e.target.querySelector(".op-title").textContent}' added to recipe`);
this.triggerArgEvents(e.target);
this.updateZIndices();
window.dispatchEvent(this.manager.statechange);
}

View File

@@ -97,10 +97,14 @@ class TestRegister {
ret.status = "passing";
} else if ("expectedMatch" in test && test.expectedMatch.test(result.result)) {
ret.status = "passing";
} else if ("unexpectedMatch" in test && !test.unexpectedMatch.test(result.result)) {
ret.status = "passing";
} else {
ret.status = "failing";
const expected = test.expectedOutput ? test.expectedOutput :
test.expectedMatch ? test.expectedMatch.toString() : "unknown";
test.expectedMatch ? test.expectedMatch.toString() :
test.unexpectedMatch ? "to not find " + test.unexpectedMatch.toString() :
"unknown";
ret.output = [
"Expected",
"\t" + expected.replace(/\n/g, "\n\t"),

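A hypothetical test entry exercising the new unexpectedMatch field (illustrative only, not taken from the diff) could look like:
{
    name: "Example: Base64 output contains no whitespace", // hypothetical test case
    input: "hello",
    unexpectedMatch: /\s/,
    recipeConfig: [
        { op: "To Base64", args: ["A-Za-z0-9+/="] }
    ]
}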
View File

@@ -100,6 +100,7 @@ import "./tests/Lorenz.mjs";
import "./tests/LuhnChecksum.mjs";
import "./tests/CipherSaber2.mjs";
import "./tests/Colossus.mjs";
import "./tests/ParseObjectIDTimestamp.mjs";
// Cannot test operations that use the File type yet
@@ -120,4 +121,3 @@ const logOpsTestReport = logTestReport.bind(null, testStatus);
const results = await TestRegister.runTests();
logOpsTestReport(results);
})();

File diff suppressed because one or more lines are too long

View File

@@ -92,6 +92,19 @@ TestRegister.addTests([
]
}
]
},
{
name: "0x with Comma to Ascii",
input: "0x74,0x65,0x73,0x74,0x20,0x73,0x74,0x72,0x69,0x6e,0x67",
expectedOutput: "test string",
recipeConfig: [
{
"op": "From Hex",
"args": [
"0x with comma"
]
}
]
}
},
]);

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,24 @@
/**
* Parse ObjectID timestamp tests
*
* @author dmfj [dominic@dmfj.io]
*
* @copyright Crown Copyright 2018
* @license Apache-2.0
*/
import TestRegister from "../../lib/TestRegister.mjs";
TestRegister.addTests([
{
name: "Parse ISO timestamp from ObjectId",
input: "000000000000000000000000",
expectedOutput: "1970-01-01T00:00:00.000Z",
recipeConfig: [
{
op: "Parse ObjectID timestamp",
args: [],
}
],
}
]);