Merge pull request #935 from twiss/fix-armor-checksum-errors

Fix armor checksum errors being ignored when not streaming
Daniel Huigens 2019-07-19 20:08:16 +02:00 committed by GitHub
commit 8585ad8924
6 changed files with 139 additions and 57 deletions
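
In short: before this fix, decrypting a message whose ASCII armor checksum had been tampered with could succeed silently when the input was not streamed, because the checksum error was only ever emitted on a stream that nothing read. A minimal sketch of the behavior guaranteed after this change, using the same v4 API as the new test at the bottom of this diff (badSumEncrypted and key as constructed there; the steps run inside an async function):

// Decrypting armor whose "=XXXX" checksum line has been corrupted.
const message = await openpgp.message.readArmored(badSumEncrypted);
const { data } = await openpgp.decrypt({ message, privateKeys: [key] });
await openpgp.stream.readToEnd(data);
// After this fix, one of the three steps above rejects with
// "Ascii armor integrity check on message failed" (which step depends on
// streaming and config; see the test). Before it, all three could resolve.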

Gruntfile.js

@@ -15,6 +15,28 @@ module.exports = function(grunt) {
   // Project configuration.
   const dev = !!grunt.option('dev');
   const compat = !!grunt.option('compat');
+  const plugins = compat ? [
+    "transform-async-to-generator",
+    "syntax-async-functions",
+    "transform-regenerator",
+    "transform-runtime"
+  ] : [];
+  const presets = [[require.resolve('babel-preset-env'), {
+    targets: {
+      browsers: compat ? [
+        'IE >= 11',
+        'Safari >= 9',
+        'Last 2 Chrome versions',
+        'Last 2 Firefox versions',
+        'Last 2 Edge versions'
+      ] : [
+        'Last 2 Chrome versions',
+        'Last 2 Firefox versions',
+        'Last 2 Safari versions',
+        'Last 2 Edge versions'
+      ]
+    }
+  }]];
   grunt.initConfig({
     pkg: grunt.file.readJSON('package.json'),
     browserify: {
@@ -49,29 +71,9 @@ module.exports = function(grunt) {
             global: true,
             // Only babelify web-streams-polyfill, web-stream-tools, asmcrypto, email-addresses and seek-bzip in node_modules
             only: /^(?:.*\/node_modules\/@mattiasbuelens\/web-streams-polyfill\/|.*\/node_modules\/web-stream-tools\/|.*\/node_modules\/asmcrypto\.js\/|.*\/node_modules\/email-addresses\/|.*\/node_modules\/seek-bzip\/|(?!.*\/node_modules\/)).*$/,
-            plugins: compat ? [
-              "transform-async-to-generator",
-              "syntax-async-functions",
-              "transform-regenerator",
-              "transform-runtime"
-            ] : [],
             ignore: ['*.min.js'],
-            presets: [[require.resolve('babel-preset-env'), {
-              targets: {
-                browsers: compat ? [
-                  'IE >= 11',
-                  'Safari >= 9',
-                  'Last 2 Chrome versions',
-                  'Last 2 Firefox versions',
-                  'Last 2 Edge versions'
-                ] : [
-                  'Last 2 Chrome versions',
-                  'Last 2 Firefox versions',
-                  'Last 2 Safari versions',
-                  'Last 2 Edge versions'
-                ]
-              }
-            }]]
+            plugins,
+            presets
           }]
         ],
         plugin: ['browserify-derequire']
@@ -97,13 +99,9 @@ module.exports = function(grunt) {
             global: true,
             // Only babelify chai-as-promised in node_modules
             only: /^(?:.*\/node_modules\/chai-as-promised\/|(?!.*\/node_modules\/)).*$/,
-            plugins: ["transform-async-to-generator",
-              "syntax-async-functions",
-              "transform-regenerator",
-              "transform-runtime",
-              "transform-remove-strict-mode"],
             ignore: ['*.min.js'],
-            presets: ["env"]
+            plugins,
+            presets
           }]
         ]
       }

src/message.js

@@ -115,22 +115,22 @@ Message.prototype.decrypt = async function(privateKeys, passwords, sessionKeys,
   const symEncryptedPacket = symEncryptedPacketlist[0];
   let exception = null;
-  for (let i = 0; i < keyObjs.length; i++) {
-    if (!keyObjs[i] || !util.isUint8Array(keyObjs[i].data) || !util.isString(keyObjs[i].algorithm)) {
+  const decryptedPromise = Promise.all(keyObjs.map(async keyObj => {
+    if (!keyObj || !util.isUint8Array(keyObj.data) || !util.isString(keyObj.algorithm)) {
       throw new Error('Invalid session key for decryption.');
     }
     try {
-      await symEncryptedPacket.decrypt(keyObjs[i].algorithm, keyObjs[i].data, streaming);
-      break;
+      await symEncryptedPacket.decrypt(keyObj.algorithm, keyObj.data, streaming);
     } catch (e) {
       util.print_debug_error(e);
       exception = e;
     }
-  }
+  }));
+  // We don't await stream.cancel here because it only returns when the other copy is canceled too.
+  stream.cancel(symEncryptedPacket.encrypted); // Don't keep copy of encrypted data in memory.
+  symEncryptedPacket.encrypted = null;
+  await decryptedPromise;
   if (!symEncryptedPacket.packets || !symEncryptedPacket.packets.length) {
     throw exception || new Error('Decryption failed.');
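
The change above replaces a for loop that broke out of iteration on the first successful decrypt with a Promise.all over every candidate session key: all attempts are started, failures are remembered rather than thrown, the original ciphertext stream is cancelled, and only after every attempt has settled is success judged by whether any attempt produced packets. A condensed restatement with the intent spelled out in comments (symEncryptedPacket, keyObjs and streaming as in the surrounding code):

let exception = null;
const decryptedPromise = Promise.all(keyObjs.map(async keyObj => {
  try {
    // Each attempt consumes its own clone of the ciphertext (see the
    // sym_encrypted_integrity_protected.js hunk below).
    await symEncryptedPacket.decrypt(keyObj.algorithm, keyObj.data, streaming);
  } catch (e) {
    exception = e; // remember the last failure for the final throw
  }
}));
stream.cancel(symEncryptedPacket.encrypted); // drop the retained ciphertext
symEncryptedPacket.encrypted = null;
await decryptedPromise; // every attempt has settled past this point
if (!symEncryptedPacket.packets || !symEncryptedPacket.packets.length) {
  throw exception || new Error('Decryption failed.');
}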

src/openpgp.js

@@ -389,7 +389,7 @@ export function decrypt({ message, privateKeys, passwords, sessionKeys, publicKe
     result.signatures = signature ? await decrypted.verifyDetached(signature, publicKeys, date, streaming) : await decrypted.verify(publicKeys, date, streaming);
     result.data = format === 'binary' ? decrypted.getLiteralData() : decrypted.getText();
     result.filename = decrypted.getFilename();
-    if (streaming) linkStreams(result, message, decrypted.packets.stream);
+    if (streaming) linkStreams(result, message);
     result.data = await convertStream(result.data, streaming);
     if (!streaming) await prepareSignatures(result.signatures);
     return result;
@@ -659,25 +659,13 @@ async function convertStreams(obj, streaming, keys=[]) {
 /**
  * Link result.data to the message stream for cancellation.
  * Also, forward errors in the message to result.data.
  * @param {Object} result the data to convert
  * @param {Message} message message object
- * @param {ReadableStream} erroringStream (optional) stream which either errors or gets closed without data
  * @returns {Object}
  */
-function linkStreams(result, message, erroringStream) {
+function linkStreams(result, message) {
   result.data = stream.transformPair(message.packets.stream, async (readable, writable) => {
-    await stream.pipe(result.data, writable, {
-      preventClose: true
-    });
-    const writer = stream.getWriter(writable);
-    try {
-      // Forward errors in erroringStream (defaulting to the message stream) to result.data.
-      await stream.readToEnd(erroringStream || readable, arr => arr);
-      await writer.close();
-    } catch(e) {
-      await writer.abort(e);
-    }
+    await stream.pipe(result.data, writable);
   });
 }
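
With the packet parser now forwarding trailing stream errors itself (see the packet.js hunk below), linkStreams no longer needs the erroringStream parameter or the manual writer close/abort dance. A sketch of what remains, with stream being the web-stream-tools import used throughout the codebase:

function linkStreams(result, message) {
  // transformPair ties the two lifetimes together: cancelling result.data
  // cancels message.packets.stream, and an error on the message stream
  // (e.g. a bad armor checksum) now errors result.data as well.
  result.data = stream.transformPair(message.packets.stream, async (readable, writable) => {
    // Without { preventClose: true }, pipe() also forwards closure and
    // errors to the writable side, so no explicit close()/abort() is needed.
    await stream.pipe(result.data, writable);
  });
}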

src/packet/packet.js

@@ -140,6 +140,7 @@ export default {
   read: async function(input, streaming, callback) {
     const reader = stream.getReader(input);
     let writer;
+    let callbackReturned;
     try {
       const peekedBytes = await reader.peekBytes(2);
       // some sanity checks
@@ -168,7 +169,6 @@ export default {
       const supportsStreaming = this.supportsStreaming(tag);
       let packet = null;
-      let callbackReturned;
       if (streaming && supportsStreaming) {
         const transform = new TransformStream();
         writer = stream.getWriter(transform.writable);
@@ -256,15 +256,48 @@
         }
       } while(wasPartialLength);
-      if (!writer) {
-        packet = util.concatUint8Array(packet);
-        await callback({ tag, packet });
-      }
-      const nextPacket = await reader.peekBytes(2);
+      // If this was not a packet that "supports streaming", we peek to check
+      // whether it is the last packet in the message. We peek 2 bytes instead
+      // of 1 because the beginning of this function also peeks 2 bytes, and we
+      // want to cut a `subarray` of the correct length into `web-stream-tools`'
+      // `externalBuffer` as a tiny optimization here.
+      //
+      // If it *was* a streaming packet (i.e. the data packets), we peek at the
+      // entire remainder of the stream, in order to forward errors in the
+      // remainder of the stream to the packet data. (Note that this means we
+      // read/peek at all signature packets before closing the literal data
+      // packet, for example.) This forwards armor checksum errors to the
+      // encrypted data stream, for example, so that they don't get lost /
+      // forgotten on encryptedMessage.packets.stream, which we never look at.
+      //
+      // Note that subsequent packet parsing errors could still end up there if
+      // `config.tolerant` is set to false, or on malformed messages with
+      // multiple data packets, but usually it shouldn't happen.
+      //
+      // An example of what we do when stream-parsing a message containing
+      // [ one-pass signature packet, literal data packet, signature packet ]:
+      // 1. Read the one-pass signature packet
+      // 2. Peek 2 bytes of the literal data packet
+      // 3. Parse the one-pass signature packet
+      //
+      // 4. Read the literal data packet, simultaneously stream-parsing it
+      // 5. Peek until the end of the message
+      // 6. Finish parsing the literal data packet
+      //
+      // 7. Read the signature packet again (we already peeked at it in step 5)
+      // 8. Peek at the end of the stream again (`peekBytes` returns undefined)
+      // 9. Parse the signature packet
+      //
+      // Note that this means that if there's an error in the very end of the
+      // stream, such as an MDC error, we throw in step 5 instead of in step 8
+      // (or never), which is the point of this exercise.
+      const nextPacket = await reader.peekBytes(supportsStreaming ? Infinity : 2);
       if (writer) {
         await writer.ready;
         await writer.close();
         await callbackReturned;
+      } else {
+        packet = util.concatUint8Array(packet);
+        await callback({ tag, packet });
       }
       return !nextPacket || !nextPacket.length;
     } catch(e) {
@@ -275,6 +308,9 @@
         throw e;
       }
     } finally {
+      if (writer) {
+        await callbackReturned;
+      }
       reader.releaseLock();
     }
   }
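
The long comment above is the heart of the fix; the operative change is the peekBytes argument. A minimal sketch of the idea in isolation, assuming a web-stream-tools reader whose peekBytes(n) returns up to n bytes without consuming them (undefined at end of stream); isLastPacket is a hypothetical name:

// After parsing one packet: decide whether it was the last one. For the
// streamed data packet we peek the entire remainder, so a trailing error
// (armor checksum, MDC) throws here and is forwarded to the packet data
// instead of dying unobserved on a stream nobody reads.
async function isLastPacket(reader, supportsStreaming) {
  const nextPacket = await reader.peekBytes(supportsStreaming ? Infinity : 2);
  return !nextPacket || !nextPacket.length;
}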

src/packet/sym_encrypted_integrity_protected.js

@@ -109,8 +109,8 @@ SymEncryptedIntegrityProtected.prototype.encrypt = async function (sessionKeyAlg
  * @async
  */
 SymEncryptedIntegrityProtected.prototype.decrypt = async function (sessionKeyAlgorithm, key, streaming) {
-  if (!streaming) this.encrypted = await stream.readToEnd(this.encrypted);
-  const encrypted = stream.clone(this.encrypted);
+  let encrypted = stream.clone(this.encrypted);
+  if (!streaming) encrypted = await stream.readToEnd(encrypted);
   const decrypted = await crypto.cfb.decrypt(sessionKeyAlgorithm, key, encrypted, new Uint8Array(crypto.cipher[sessionKeyAlgorithm].blockSize));
   // there must be a modification detection code packet as the
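
A small but load-bearing reorder: this.encrypted is no longer overwritten with its fully-read contents. Each decryption attempt clones the stream and, when not streaming, collects only the clone, which keeps this.encrypted intact for the stream.cancel() call in Message.prototype.decrypt above and makes a tail-of-stream error reject inside decrypt() itself. Condensed, with comments (stream is web-stream-tools):

// Clone per attempt; leave this.encrypted untouched so the caller can
// still stream.cancel() it afterwards.
let encrypted = stream.clone(this.encrypted);
// Without streaming, collect the clone up front: an error at the tail of
// the underlying stream (such as a bad armor checksum) throws right here,
// i.e. the decrypt() promise rejects instead of the error being ignored.
if (!streaming) encrypted = await stream.readToEnd(encrypted);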

test/general/openpgp.js

@@ -689,6 +689,7 @@ describe('[Sauce Labs Group 2] OpenPGP.js public api tests', function() {
   let publicKey_1337;
   let privateKey;
   let publicKey;
+  let publicKeyNoAEAD;
   let zero_copyVal;
   let use_nativeVal;
   let aead_protectVal;
@@ -1617,6 +1618,65 @@ describe('[Sauce Labs Group 2] OpenPGP.js public api tests', function() {
       expect(decrypted.signatures[1].signature.packets.length).to.equal(1);
     });
   });
+  it('should fail to decrypt modified message', async function() {
+    const { privateKeyArmored } = await openpgp.generateKey({ curve: 'curve25519', userIds: [{ email: 'test@email.com' }] });
+    const { keys: [key] } = await openpgp.key.readArmored(privateKeyArmored);
+    const { data } = await openpgp.encrypt({ message: openpgp.message.fromBinary(new Uint8Array(500)), publicKeys: [key.toPublic()] });
+    let badSumEncrypted = data.replace(/\n=[a-zA-Z0-9/+]{4}/, '\n=aaaa');
+    if (badSumEncrypted === data) { // checksum was already =aaaa
+      badSumEncrypted = data.replace(/\n=[a-zA-Z0-9/+]{4}/, '\n=bbbb');
+    }
+    if (badSumEncrypted === data) {
+      throw new Error("Was not able to successfully modify checksum");
+    }
+    const badBodyEncrypted = data.replace(/\n=([a-zA-Z0-9/+]{4})/, 'aaa\n=$1');
+    for (let allow_streaming = 1; allow_streaming >= 0; allow_streaming--) {
+      openpgp.config.allow_unauthenticated_stream = !!allow_streaming;
+      if (openpgp.getWorker()) {
+        openpgp.getWorker().workers.forEach(worker => {
+          worker.postMessage({ event: 'configure', config: openpgp.config });
+        });
+      }
+      await Promise.all([badSumEncrypted, badBodyEncrypted].map(async (encrypted, i) => {
+        await Promise.all([
+          encrypted,
+          openpgp.stream.toStream(encrypted),
+          new ReadableStream({
+            start() {
+              this.remaining = encrypted.split('\n');
+            },
+            async pull(controller) {
+              if (this.remaining.length) {
+                await new Promise(res => setTimeout(res));
+                controller.enqueue(this.remaining.shift() + '\n');
+              } else {
+                controller.close();
+              }
+            }
+          })
+        ].map(async (encrypted, j) => {
+          let stepReached = 0;
+          try {
+            const message = await openpgp.message.readArmored(encrypted);
+            stepReached = 1;
+            const { data: decrypted } = await openpgp.decrypt({ message: message, privateKeys: [key] });
+            stepReached = 2;
+            await openpgp.stream.readToEnd(decrypted);
+          } catch (e) {
+            expect(e.message).to.match(/Ascii armor integrity check on message failed/);
+            expect(stepReached).to.equal(
+              j === 0 ? 0 :
+              openpgp.config.aead_chunk_size_byte === 0 || (!openpgp.config.aead_protect && openpgp.config.allow_unauthenticated_stream) ? 2 :
+              1
+            );
+            return;
+          }
+          throw new Error(`Expected "Ascii armor integrity check on message failed" error in subtest ${i}.${j}`);
+        }));
+      }));
+    }
+  });
 });
 describe('ELG / DSA encrypt, decrypt, sign, verify', function() {
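
For each tampered ciphertext (bad checksum at i = 0, bad body at i = 1), the test above feeds three transports through the same pipeline: a plain string (j = 0, armor fully parsed up front, so readArmored itself throws and stepReached stays 0), a ready stream (j = 1), and a stream that trickles one armor line per macrotask (j = 2) to force genuinely incremental parsing. For the stream cases the error is expected from decrypt (stepReached 1), or only from readToEnd (stepReached 2) when the config permits unauthenticated or unchunked streaming. The trickle transport, condensed into a standalone helper (makeTrickleStream is a hypothetical name):

// Emit one armor line per macrotask so the parser really runs incrementally.
function makeTrickleStream(armored) {
  const lines = armored.split('\n');
  return new ReadableStream({
    async pull(controller) {
      if (!lines.length) return controller.close();
      await new Promise(resolve => setTimeout(resolve)); // yield to the event loop
      controller.enqueue(lines.shift() + '\n');
    }
  });
}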