Merge branch 'master' of github.com:gchq/CyberChef into node-lib

d98762625 2019-05-03 13:41:05 +01:00
commit 0a3d87dfe3
19 changed files with 1256 additions and 675 deletions

View File

@ -32,8 +32,9 @@ deploy:
skip_cleanup: true
api_key:
secure: "HV1WSKv4l/0Y2bKKs1iBJocBcmLj08PCRUeEM/jTwA4jqJ8EiLHWiXtER/D5sEg2iibRVKd2OQjfrmS6bo4AiwdeVgAKmv0FtS2Jw+391N8Nd5AkEANHa5Om/IpHLTL2YRAjpJTsDpY72bMUTJIwjQA3TFJkgrpOw6KYfohOcgbxLpZ4XuNJRU3VL4Hsxdv5V9aOVmfFOmMOVPQlakXy7NgtW5POp1f2WJwgcZxylkR1CjwaqMyXmSoVl46pyH3tr5+dptsQoKSGdi6sIHGA60oDotFPcm+0ifa47wZw+vapuuDi4tdNxhrHGaDMG8xiE0WFDHwQUDlk2/+W7j9SEX0H3Em7us371JXRp56EDwEcDa34VpVkC6i8HGcHK55hnxVbMZXGf3qhOFD8wY7qMbjMRvIpucrMHBi86OfkDfv0vDj2LyvIl5APj/AX50BrE0tfH1MZbH26Jkx4NdlkcxQ14GumarmUqfmVvbX/fsoA6oUuAAE9ZgRRi3KHO4wci6KUcRfdm+XOeUkaBFsL86G3EEYIvrtBTuaypdz+Cx7nd1iPZyWMx5Y1gXnVzha4nBdV4+7l9JIsFggD8QVpw2uHXQiS1KXFjOeqA3DBD8tjMB7q26Fl2fD3jkOo4BTbQ2NrRIZUu/iL+fOmMPsyMt2qulB0yaSBCfkbEq8xrUA="
file_glob: true
file:
- build/prod/cyberchef.htm
- build/prod/*.zip
- build/node/CyberChef.js
on:
repo: gchq/CyberChef

View File

@ -2,6 +2,12 @@
All major and minor version changes will be documented in this file. Details of patch-level version changes can be found in [commit messages](https://github.com/gchq/CyberChef/commits/master).
### [8.31.0] - 2019-04-12
- The downloadable version of CyberChef is now a .zip file containing separate modules rather than a single .htm file. It is still completely standalone and will not make any external network requests. This change reduces the complexity of the build process significantly. [@n1474335]
### [8.30.0] - 2019-04-12
- 'Protobuf Decode' operation added [@n1474335] | [#533]
### [8.29.0] - 2019-03-31
- 'BLAKE2s' and 'BLAKE2b' hashing operations added [@h345983745] | [#525]
@ -124,6 +130,8 @@ All major and minor version changes will be documented in this file. Details of
[8.31.0]: https://github.com/gchq/CyberChef/releases/tag/v8.31.0
[8.30.0]: https://github.com/gchq/CyberChef/releases/tag/v8.30.0
[8.29.0]: https://github.com/gchq/CyberChef/releases/tag/v8.29.0
[8.28.0]: https://github.com/gchq/CyberChef/releases/tag/v8.28.0
[8.27.0]: https://github.com/gchq/CyberChef/releases/tag/v8.27.0
@ -223,3 +231,4 @@ All major and minor version changes will be documented in this file. Details of
[#506]: https://github.com/gchq/CyberChef/pull/506
[#516]: https://github.com/gchq/CyberChef/pull/516
[#525]: https://github.com/gchq/CyberChef/pull/525
[#533]: https://github.com/gchq/CyberChef/pull/533

View File

@ -4,7 +4,6 @@ const webpack = require("webpack");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const BundleAnalyzerPlugin = require("webpack-bundle-analyzer").BundleAnalyzerPlugin;
const NodeExternals = require("webpack-node-externals");
const Inliner = require("web-resource-inliner");
const glob = require("glob");
const path = require("path");
const UglifyJSWebpackPlugin = require("uglifyjs-webpack-plugin");
@ -50,18 +49,16 @@ module.exports = function (grunt) {
grunt.registerTask("prod",
"Creates a production-ready build. Use the --msg flag to add a compile message.",
["eslint", "clean:prod", "clean:config", "exec:generateConfig", "webpack:web", "inline", "chmod"]);
[
"eslint", "clean:prod", "clean:config", "exec:generateConfig", "webpack:web",
"copy:standalone", "zip:standalone", "clean:standalone", "chmod"
]);
grunt.registerTask("default",
"Lints the code base",
["eslint", "exec:repoSize"]);
grunt.registerTask("inline",
"Compiles a production build of CyberChef into a single, portable web page.",
["exec:generateConfig", "webpack:webInline", "runInliner", "clean:inlineScripts"]);
grunt.registerTask("runInliner", runInliner);
grunt.registerTask("doc", "docs");
grunt.registerTask("tests", "test");
grunt.registerTask("lint", "eslint");
@ -79,6 +76,7 @@ module.exports = function (grunt) {
grunt.loadNpmTasks("grunt-accessibility");
grunt.loadNpmTasks("grunt-concurrent");
grunt.loadNpmTasks("grunt-contrib-connect");
grunt.loadNpmTasks("grunt-zip");
// Project configuration
@ -101,32 +99,6 @@ module.exports = function (grunt) {
},
moduleEntryPoints = listEntryModules();
/**
* Compiles a production build of CyberChef into a single, portable web page.
*/
function runInliner() {
const done = this.async();
Inliner.html({
relativeTo: "build/prod/",
fileContent: grunt.file.read("build/prod/cyberchef.htm"),
images: true,
svgs: true,
scripts: true,
links: true,
strict: true
}, function(error, result) {
if (error) {
if (error instanceof Error) {
done(error);
} else {
done(new Error(error));
}
} else {
grunt.file.write("build/prod/cyberchef.htm", result);
done(true);
}
});
}
/**
* Generates an entry list for all the modules.
@ -137,7 +109,7 @@ module.exports = function (grunt) {
glob.sync("./src/core/config/modules/*.mjs").forEach(file => {
const basename = path.basename(file);
if (basename !== "Default.mjs" && basename !== "OpModules.mjs")
entryModules[basename.split(".mjs")[0]] = path.resolve(file);
entryModules["modules/" + basename.split(".mjs")[0]] = path.resolve(file);
});
return entryModules;
@ -150,7 +122,7 @@ module.exports = function (grunt) {
node: ["build/node/*"],
config: ["src/core/config/OperationConfig.json", "src/core/config/modules/*", "src/code/operations/index.mjs", "src/node/index.mjs", "src/node/config/OperationConfig.json"],
docs: ["docs/*", "!docs/*.conf.json", "!docs/*.ico", "!docs/*.png"],
inlineScripts: ["build/prod/scripts.js"],
standalone: ["build/prod/CyberChef*.html"]
},
eslint: {
options: {
@ -202,7 +174,10 @@ module.exports = function (grunt) {
}, moduleEntryPoints),
output: {
path: __dirname + "/build/prod",
// globalObject: "this"
filename: chunkData => {
return chunkData.chunk.name === "main" ? "assets/[name].js": "[name].js";
},
globalObject: "this"
},
resolve: {
alias: {
@ -232,46 +207,6 @@ module.exports = function (grunt) {
]
};
},
webInline: {
mode: "production",
target: "web",
entry: "./src/web/index.js",
output: {
filename: "scripts.js",
path: __dirname + "/build/prod"
},
plugins: [
new webpack.DefinePlugin(Object.assign({}, BUILD_CONSTANTS, {
INLINE: "true"
})),
new HtmlWebpackPlugin({
filename: "cyberchef.htm",
template: "./src/web/html/index.html",
compileTime: compileTime,
version: pkg.version + "s",
inline: true,
minify: {
removeComments: true,
collapseWhitespace: true,
minifyJS: true,
minifyCSS: true
}
}),
]
},
tests: {
mode: "development",
target: "node",
entry: "./test/index.mjs",
externals: [NodeExternals()],
output: {
filename: "index.js",
path: __dirname + "/build/test/node"
},
plugins: [
new webpack.DefinePlugin(BUILD_CONSTANTS),
]
},
node: {
mode: NODE_PROD ? "production" : "development",
target: "node",
@ -381,6 +316,18 @@ module.exports = function (grunt) {
}
}
},
zip: {
standalone: {
cwd: "build/prod/",
src: [
"build/prod/**/*",
"!build/prod/index.html",
"!build/prod/BundleAnalyzerReport.html",
"!build/prod/sitemap.js"
],
dest: `build/prod/CyberChef_v${pkg.version}.zip`
}
},
connect: {
prod: {
options: {
@ -393,10 +340,16 @@ module.exports = function (grunt) {
ghPages: {
options: {
process: function (content, srcpath) {
// Add Google Analytics code to index.html
if (srcpath.indexOf("index.html") >= 0) {
// Add Google Analytics code to index.html
content = content.replace("</body></html>",
grunt.file.read("src/web/static/ga.html") + "</body></html>");
// Add Structured Data for SEO
content = content.replace("</head>",
"<script type='application/ld+json'>" +
JSON.stringify(JSON.parse(grunt.file.read("src/web/static/structuredData.json"))) +
"</script></head>");
return grunt.template.process(content, srcpath);
} else {
return content;
@ -415,6 +368,28 @@ module.exports = function (grunt) {
dest: "build/prod/"
},
]
},
standalone: {
options: {
process: function (content, srcpath) {
if (srcpath.indexOf("index.html") >= 0) {
// Replace download link with version number
content = content.replace(/<a [^>]+>Download CyberChef.+?<\/a>/,
`<span>Version ${pkg.version}</span>`);
return grunt.template.process(content, srcpath);
} else {
return content;
}
},
noProcess: ["**", "!**/*.html"]
},
files: [
{
src: "build/prod/index.html",
dest: `build/prod/CyberChef_v${pkg.version}.html`
}
]
}
},
chmod: {
@ -477,7 +452,7 @@ module.exports = function (grunt) {
command: "node --experimental-modules --no-warnings --no-deprecation tests/operations/index.mjs"
},
browserTests: {
command: "./node_modules/.bin/nightwatch --env prod,inline"
command: "./node_modules/.bin/nightwatch --env prod"
},
nodeTests: {
command: "node --experimental-modules --no-warnings --no-deprecation tests/node/index.mjs"

View File

@ -23,10 +23,6 @@
"prod": {
"launch_url": "http://localhost:8000/index.html"
},
"inline": {
"launch_url": "http://localhost:8000/cyberchef.htm"
}
}

package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "cyberchef",
"version": "8.29.1",
"version": "8.31.1",
"description": "The Cyber Swiss Army Knife for encryption, encoding, compression and data analysis.",
"author": "n1474335 <n1474335@gmail.com>",
"homepage": "https://gchq.github.io/CyberChef",
@ -31,17 +31,17 @@
"module": "src/node/index.mjs",
"bugs": "https://github.com/gchq/CyberChef/issues",
"devDependencies": {
"@babel/core": "^7.4.0",
"@babel/plugin-transform-runtime": "^7.4.0",
"@babel/preset-env": "^7.4.2",
"autoprefixer": "^9.5.0",
"@babel/core": "^7.4.3",
"@babel/plugin-transform-runtime": "^7.4.3",
"@babel/preset-env": "^7.4.3",
"autoprefixer": "^9.5.1",
"babel-eslint": "^10.0.1",
"babel-loader": "^8.0.5",
"babel-plugin-dynamic-import-node": "^2.2.0",
"chromedriver": "^2.46.0",
"colors": "^1.3.3",
"css-loader": "^2.1.1",
"eslint": "^5.15.3",
"eslint": "^5.16.0",
"exports-loader": "^0.7.0",
"file-loader": "^3.0.1",
"grunt": "^1.0.4",
@ -54,13 +54,14 @@
"grunt-contrib-watch": "^1.1.0",
"grunt-eslint": "^21.0.0",
"grunt-exec": "~3.0.0",
"grunt-jsdoc": "^2.3.0",
"grunt-jsdoc": "^2.3.1",
"grunt-webpack": "^3.1.3",
"grunt-zip": "^0.18.2",
"html-webpack-plugin": "^3.2.0",
"imports-loader": "^0.8.0",
"ink-docstrap": "^1.3.2",
"jsdoc-babel": "^0.5.0",
"mini-css-extract-plugin": "^0.5.0",
"mini-css-extract-plugin": "^0.6.0",
"nightwatch": "^1.0.19",
"node-sass": "^4.11.0",
"postcss-css-variables": "^0.12.0",
@ -73,15 +74,15 @@
"svg-url-loader": "^2.3.2",
"uglifyjs-webpack-plugin": "^2.0.1",
"url-loader": "^1.1.2",
"web-resource-inliner": "^4.3.1",
"webpack": "^4.29.6",
"webpack-bundle-analyzer": "^3.1.0",
"webpack-dev-server": "^3.2.1",
"webpack-bundle-analyzer": "^3.3.2",
"webpack-dev-server": "^3.3.1",
"webpack-node-externals": "^1.7.2",
"worker-loader": "^2.0.0"
},
"dependencies": {
"@babel/runtime": "^7.4.2",
"@babel/polyfill": "^7.4.3",
"@babel/runtime": "^7.4.3",
"arrive": "^2.4.1",
"babel-plugin-transform-builtin-extend": "1.1.2",
"bcryptjs": "^2.4.3",
@ -93,7 +94,7 @@
"bson": "^4.0.2",
"chi-squared": "^1.1.0",
"clippyjs": "0.0.3",
"core-js": "^3.0.0",
"core-js": "^3.0.1",
"crypto-api": "^0.8.3",
"crypto-js": "^3.1.9-1",
"ctph.js": "0.0.5",
@ -108,8 +109,8 @@
"file-saver": "^2.0.1",
"geodesy": "^1.1.3",
"highlight.js": "^9.15.6",
"jimp": "^0.6.0",
"jquery": "^3.3.1",
"jimp": "^0.6.1",
"jquery": "3.3.1",
"js-crc": "^0.2.0",
"js-sha3": "^0.8.0",
"jsesc": "^2.5.2",
@ -131,7 +132,7 @@
"notepack.io": "^2.2.0",
"nwmatcher": "^1.4.4",
"otp": "^0.1.3",
"popper.js": "^1.14.7",
"popper.js": "^1.15.0",
"qr-image": "^3.2.0",
"scryptsy": "^2.0.0",
"snackbarjs": "^1.1.0",

View File

@ -178,7 +178,7 @@ self.loadRequiredModules = function(recipeConfig) {
if (!OpModules.hasOwnProperty(module)) {
log.info(`Loading ${module} module`);
self.sendStatusMessage(`Loading ${module} module`);
self.importScripts(`${self.docURL}/${module}.js`);
self.importScripts(`${self.docURL}/modules/${module}.js`);
self.sendStatusMessage("");
}
});

View File

@ -169,6 +169,9 @@
"Parse URI",
"URL Encode",
"URL Decode",
"Protobuf Decode",
"VarInt Encode",
"VarInt Decode",
"Format MAC addresses",
"Change IP format",
"Group IP addresses",

src/core/lib/Protobuf.mjs Normal file
View File

@ -0,0 +1,285 @@
import Utils from "../Utils";
/**
* Protobuf lib. Contains functions to decode protobuf serialised
* data without a schema or .proto file.
*
* Provides utility functions to encode and decode variable length
* integers (varint).
*
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
class Protobuf {
/**
* Protobuf constructor
*
* @param {byteArray} data
*/
constructor(data) {
// Check we have a byteArray
if (data instanceof Array) {
this.data = data;
} else {
throw new Error("Protobuf input must be a byteArray");
}
// Set up masks
this.TYPE = 0x07;
this.NUMBER = 0x78;
this.MSB = 0x80;
this.VALUE = 0x7f;
// Declare offset and length
this.offset = 0;
this.LENGTH = data.length;
}
// Public Functions
/**
* Encode a varint from a number
*
* @param {number} number
* @returns {byteArray}
*/
static varIntEncode(number) {
const MSB = 0x80,
VALUE = 0x7f,
MSBALL = ~VALUE,
INT = Math.pow(2, 31);
const out = [];
let offset = 0;
while (number >= INT) {
out[offset++] = (number & 0xff) | MSB;
number /= 128;
}
while (number & MSBALL) {
out[offset++] = (number & 0xff) | MSB;
number >>>= 7;
}
out[offset] = number | 0;
return out;
}
/**
* Decode a varint from the byteArray
*
* @param {byteArray} input
* @returns {number}
*/
static varIntDecode(input) {
const pb = new Protobuf(input);
return pb._varInt();
}
/**
* Parse Protobuf data
*
* @param {byteArray} input
* @returns {Object}
*/
static decode(input) {
const pb = new Protobuf(input);
return pb._parse();
}
// Private Class Functions
/**
* Main private parsing function
*
* @private
* @returns {Object}
*/
_parse() {
let object = {};
// Continue reading whilst we still have data
while (this.offset < this.LENGTH) {
const field = this._parseField();
object = this._addField(field, object);
}
// Throw an error if we have gone beyond the end of the data
if (this.offset > this.LENGTH) {
throw new Error("Exhausted Buffer");
}
return object;
}
/**
* Add a field read from the protobuf data into the Object. As
* protobuf fields can appear multiple times, if the field already
* exists we need to add the new field into an array of fields
* for that key.
*
* @private
* @param {Object} field
* @param {Object} object
* @returns {Object}
*/
_addField(field, object) {
// Get the field key/values
const key = field.key;
const value = field.value;
object[key] = object.hasOwnProperty(key) ?
object[key] instanceof Array ?
object[key].concat([value]) :
[object[key], value] :
value;
return object;
}
/**
* Parse a field and return the Object read from the record
*
* @private
* @returns {Object}
*/
_parseField() {
// Get the field headers
const header = this._fieldHeader();
const type = header.type;
const key = header.key;
switch (type) {
// varint
case 0:
return { "key": key, "value": this._varInt() };
// fixed 64
case 1:
return { "key": key, "value": this._uint64() };
// length delimited
case 2:
return { "key": key, "value": this._lenDelim() };
// fixed 32
case 5:
return { "key": key, "value": this._uint32() };
// unknown type
default:
throw new Error("Unknown type 0x" + type.toString(16));
}
}
/**
* Parse the field header and return the type and key
*
* @private
* @returns {Object}
*/
_fieldHeader() {
// Make sure we call type then number to preserve offset
return { "type": this._fieldType(), "key": this._fieldNumber() };
}
/**
* Parse the field type from the field header. Type is stored in the
* lower 3 bits of the tag byte. This does not move the offset on as
* we need to read the field number from the tag byte too.
*
* @private
* @returns {number}
*/
_fieldType() {
// Field type stored in lower 3 bits of tag byte
return this.data[this.offset] & this.TYPE;
}
/**
* Parse the field number (i.e. the key) from the field header. The
* field number is stored in the bits above the three type bits. The
* tag is itself varint encoded, so follow-on bytes may need to be
* read when the field number is greater than 15.
*
* @private
* @returns {number}
*/
_fieldNumber() {
let shift = -3;
let fieldNumber = 0;
do {
fieldNumber += shift < 28 ?
shift === -3 ?
(this.data[this.offset] & this.NUMBER) >> -shift :
(this.data[this.offset] & this.VALUE) << shift :
(this.data[this.offset] & this.VALUE) * Math.pow(2, shift);
shift += 7;
} while ((this.data[this.offset++] & this.MSB) === this.MSB);
return fieldNumber;
}
// Field Parsing Functions
/**
* Read off a varint from the data
*
* @private
* @returns {number}
*/
_varInt() {
let value = 0;
let shift = 0;
// Keep reading while the upper (continuation) bit is set
// Shifts are used up to 28 bits; beyond that, multiply to avoid 32-bit overflow
do {
value += shift < 28 ?
(this.data[this.offset] & this.VALUE) << shift :
(this.data[this.offset] & this.VALUE) * Math.pow(2, shift);
shift += 7;
} while ((this.data[this.offset++] & this.MSB) === this.MSB);
return value;
}
/**
* Read off a 64 bit unsigned integer from the data
*
* @private
* @returns {number}
*/
_uint64() {
// Read off a Uint64 (note: values above 2^53 - 1 lose precision as JavaScript numbers)
let num = this.data[this.offset++] * 0x1000000 + (this.data[this.offset++] << 16) + (this.data[this.offset++] << 8) + this.data[this.offset++];
num = num * 0x100000000 + this.data[this.offset++] * 0x1000000 + (this.data[this.offset++] << 16) + (this.data[this.offset++] << 8) + this.data[this.offset++];
return num;
}
/**
* Read off a length delimited field from the data
*
* @private
* @returns {Object|string}
*/
_lenDelim() {
// Read off the field length
const length = this._varInt();
const fieldBytes = this.data.slice(this.offset, this.offset + length);
let field;
try {
// Attempt to parse as a new Protobuf Object
const pbObject = new Protobuf(fieldBytes);
field = pbObject._parse();
} catch (err) {
// Otherwise treat as bytes
field = Utils.byteArrayToChars(fieldBytes);
}
// Move the offset and return the field
this.offset += length;
return field;
}
/**
* Read a 32 bit unsigned integer from the data
*
* @private
* @returns {number}
*/
_uint32() {
// Use a dataview to read off the integer
const dataview = new DataView(new Uint8Array(this.data.slice(this.offset, this.offset + 4)).buffer);
const value = dataview.getUint32(0);
this.offset += 4;
return value;
}
}
export default Protobuf;
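For orientation, a minimal usage sketch of the class above (the import path is the one used by the operations further down; 0x08 0x96 0x01 is the standard protobuf example of field 1 set to 150):

import Protobuf from "../lib/Protobuf";

// Tag byte 0x08 = field number 1, wire type 0 (varint); 0x96 0x01 is the varint encoding of 150
Protobuf.decode([0x08, 0x96, 0x01]);     // { "1": 150 }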

View File

@ -0,0 +1,46 @@
/**
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Protobuf from "../lib/Protobuf";
/**
* Protobuf Decode operation
*/
class ProtobufDecode extends Operation {
/**
* ProtobufDecode constructor
*/
constructor() {
super();
this.name = "Protobuf Decode";
this.module = "Default";
this.description = "Decodes any Protobuf encoded data to a JSON representation of the data using the field number as the field key.";
this.infoURL = "https://wikipedia.org/wiki/Protocol_Buffers";
this.inputType = "byteArray";
this.outputType = "JSON";
this.args = [];
}
/**
* @param {byteArray} input
* @param {Object[]} args
* @returns {JSON}
*/
run(input, args) {
try {
return Protobuf.decode(input);
} catch (err) {
throw new OperationError(err);
}
}
}
export default ProtobufDecode;
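As a rough sketch of what this operation produces (in practice it runs inside a recipe, as in the test added below), a length-delimited field that does not parse as a nested message falls back to its character representation:

const op = new ProtobufDecode();
// Tag 0x12 = field 2, wire type 2 (length-delimited); length 3; bytes "You"
op.run([0x12, 0x03, 0x59, 0x6f, 0x75], []);     // { "2": "You" }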

View File

@ -230,6 +230,7 @@ function regexHighlight (input, regex, displayTotal) {
title = "",
hl = 1,
total = 0;
const captureGroups = [];
output = input.replace(regex, (match, ...args) => {
args.pop(); // Throw away full string
@ -247,9 +248,15 @@ function regexHighlight (input, regex, displayTotal) {
// Switch highlight
hl = hl === 1 ? 2 : 1;
total++;
// Store highlighted match and replace with a placeholder
captureGroups.push(`<span class='hl${hl}' title='${title}'>${Utils.escapeHtml(match)}</span>`);
return `[cc_capture_group_${total++}]`;
});
return `<span class='hl${hl}' title='${title}'>${Utils.escapeHtml(match)}</span>`;
// Safely escape all remaining text, then replace placeholders
output = Utils.escapeHtml(output);
output = output.replace(/\[cc_capture_group_(\d+)\]/g, (_, i) => {
return captureGroups[i];
});
if (displayTotal)
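The change above avoids double-escaping highlighted matches: each match is stored in its final HTML form and swapped for a placeholder, the remaining text is HTML-escaped once, and the placeholders are then replaced with the stored spans. A minimal standalone sketch of that pattern (not the CyberChef function itself; escapeHtml stands in for Utils.escapeHtml):

function highlightSafely(input, regex, escapeHtml) {
    const stored = [];
    let total = 0;
    // 1. Replace each match with a placeholder and remember its highlighted form
    let output = input.replace(regex, match => {
        stored.push(`<span class='hl1'>${escapeHtml(match)}</span>`);
        return `[cc_capture_group_${total++}]`;
    });
    // 2. Escape everything that was not matched
    output = escapeHtml(output);
    // 3. Swap the placeholders back in now that the surrounding text is safe
    return output.replace(/\[cc_capture_group_(\d+)\]/g, (_, i) => stored[i]);
}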

View File

@ -79,7 +79,7 @@ class TextEncodingBruteForce extends Operation {
let table = "<table class='table table-hover table-sm table-bordered table-nonfluid'><tr><th>Encoding</th><th>Value</th></tr>";
for (const enc in encodings) {
const value = Utils.printable(encodings[enc], true);
const value = Utils.escapeHtml(Utils.printable(encodings[enc], true));
table += `<tr><td>${enc}</td><td>${value}</td></tr>`;
}

View File

@ -0,0 +1,46 @@
/**
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Protobuf from "../lib/Protobuf";
/**
* VarInt Decode operation
*/
class VarIntDecode extends Operation {
/**
* VarIntDecode constructor
*/
constructor() {
super();
this.name = "VarInt Decode";
this.module = "Default";
this.description = "Decodes a VarInt encoded integer. VarInt is an efficient way of encoding variable length integers and is commonly used with Protobuf.";
this.infoURL = "https://developers.google.com/protocol-buffers/docs/encoding#varints";
this.inputType = "byteArray";
this.outputType = "number";
this.args = [];
}
/**
* @param {byteArray} input
* @param {Object[]} args
* @returns {number}
*/
run(input, args) {
try {
return Protobuf.varIntDecode(input);
} catch (err) {
throw new OperationError(err);
}
}
}
export default VarIntDecode;

View File

@ -0,0 +1,46 @@
/**
* @author GCHQ Contributor [3]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import Operation from "../Operation";
import OperationError from "../errors/OperationError";
import Protobuf from "../lib/Protobuf";
/**
* VarInt Encode operation
*/
class VarIntEncode extends Operation {
/**
* VarIntEncode constructor
*/
constructor() {
super();
this.name = "VarInt Encode";
this.module = "Default";
this.description = "Encodes a Vn integer as a VarInt. VarInt is an efficient way of encoding variable length integers and is commonly used with Protobuf.";
this.infoURL = "https://developers.google.com/protocol-buffers/docs/encoding#varints";
this.inputType = "number";
this.outputType = "byteArray";
this.args = [];
}
/**
* @param {number} input
* @param {Object[]} args
* @returns {byteArray}
*/
run(input, args) {
try {
return Protobuf.varIntEncode(input);
} catch (err) {
throw new OperationError(err);
}
}
}
export default VarIntEncode;
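As a worked example of the varint scheme both new operations rely on (7-bit groups, least significant first, with the top bit of each byte flagging a continuation), using the Protobuf lib added above:

import Protobuf from "../lib/Protobuf";

// 300 = 0b100101100 → 7-bit groups, least significant first: 0101100, 0000010
Protobuf.varIntEncode(300);              // [0xac, 0x02]  (0x2c with the continuation bit set, then 0x02)
Protobuf.varIntDecode([0xac, 0x02]);     // 300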

View File

@ -338,7 +338,7 @@ class ControlsWaiter {
const saveLink = this.generateStateUrl(true, true, null, "https://gchq.github.io/CyberChef/");
if (reportBugInfo) {
reportBugInfo.innerHTML = `* Version: ${PKG_VERSION + (typeof INLINE === "undefined" ? "" : "s")}
reportBugInfo.innerHTML = `* Version: ${PKG_VERSION}
* Compile time: ${COMPILE_TIME}
* User-Agent:
${navigator.userAgent}

View File

@ -131,13 +131,6 @@
};
window.addEventListener("error", loadingErrorHandler);
</script>
<% if (htmlWebpackPlugin.options.inline) { %>
<meta name="robots" content="noindex" />
<% } else { %>
<script type="application/ld+json">
<% print(JSON.stringify(require("../static/structuredData.json"))); %>
</script>
<% } %>
</head>
<body>
<!-- Preloader overlay -->
@ -153,11 +146,7 @@
<div id="content-wrapper">
<div id="banner" class="row">
<div class="col" style="text-align: left; padding-left: 10px;">
<% if (htmlWebpackPlugin.options.inline) { %>
<span>Version <%= htmlWebpackPlugin.options.version %></span>
<% } else { %>
<a href="cyberchef.htm" download>Download CyberChef <i class="material-icons">file_download</i></a>
<% } %>
<a href="CyberChef_v<%= htmlWebpackPlugin.options.version %>.zip" download>Download CyberChef <i class="material-icons">file_download</i></a>
</div>
<div class="col-md-6" id="notice-wrapper">
<span id="notice">

View File

@ -94,6 +94,7 @@ import "./tests/MultipleBombe";
import "./tests/Typex";
import "./tests/BLAKE2b";
import "./tests/BLAKE2s";
import "./tests/Protobuf";
// Cannot test operations that use the File type yet
//import "./tests/SplitColourChannels";

View File

@ -0,0 +1,36 @@
/**
* Protobuf tests.
*
* @author n1474335 [n1474335@gmail.com]
* @copyright Crown Copyright 2019
* @license Apache-2.0
*/
import TestRegister from "../TestRegister";
TestRegister.addTests([
{
name: "Protobuf Decode",
input: "0d1c0000001203596f751a024d65202b2a0a0a066162633132331200",
expectedOutput: JSON.stringify({
"1": 469762048,
"2": "You",
"3": "Me",
"4": 43,
"5": {
"1": "abc123",
"2": {}
}
}, null, 4),
recipeConfig: [
{
"op": "From Hex",
"args": ["Auto"]
},
{
"op": "Protobuf Decode",
"args": []
}
]
},
]);

View File

@ -48,7 +48,7 @@ module.exports = {
"process.browser": "true"
}),
new MiniCssExtractPlugin({
filename: "[name].css"
filename: "assets/[name].css"
}),
],
resolve: {
@ -80,7 +80,12 @@ module.exports = {
{
test: /\.css$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: MiniCssExtractPlugin.loader,
options: {
publicPath: "../"
}
},
"css-loader",
"postcss-loader",
]
@ -88,7 +93,12 @@ module.exports = {
{
test: /\.scss$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: MiniCssExtractPlugin.loader,
options: {
publicPath: "../"
}
},
"css-loader",
"sass-loader",
]
@ -97,7 +107,9 @@ module.exports = {
test: /\.(ico|eot|ttf|woff|woff2)$/,
loader: "url-loader",
options: {
limit: 10000
limit: 10000,
name: "[hash].[ext]",
outputPath: "assets"
}
},
{
@ -120,7 +132,9 @@ module.exports = {
exclude: /web\/static/,
loader: "url-loader",
options: {
limit: 10000
limit: 10000,
name: "[hash].[ext]",
outputPath: "assets"
}
},
]