diff --git a/_locales/en/messages.json b/_locales/en/messages.json index 9832d1469..7a960d42d 100644 --- a/_locales/en/messages.json +++ b/_locales/en/messages.json @@ -2571,6 +2571,14 @@ "messageformat": "This message was deleted.", "description": "Shown in a message's bubble when the message has been deleted for everyone." }, + "icu:message--attachmentTooBig--one": { + "messageformat": "Attachment too large to display.", + "description": "Shown in a message bubble if no attachments are left on message when too-large attachments are dropped" + }, + "icu:message--attachmentTooBig--multiple": { + "messageformat": "Some attachments are too large to display.", + "description": "Shown in a message bubble if any attachments are left on message when too-large attachments are dropped" + }, "icu:donation--missing": { "messageformat": "Unable to fetch donation details", "description": "Aria label for donation when we can't fetch the details." diff --git a/stylesheets/_modules.scss b/stylesheets/_modules.scss index dedec5c85..f39936660 100644 --- a/stylesheets/_modules.scss +++ b/stylesheets/_modules.scss @@ -558,6 +558,60 @@ $message-padding-horizontal: 12px; } } +.module-message__attachment-too-big { + user-select: none; + + margin-inline: -$message-padding-horizontal; + margin-top: -$message-padding-vertical; + margin-bottom: -$message-padding-vertical; + padding-top: $message-padding-vertical; + padding-bottom: $message-padding-vertical; + padding-inline: $message-padding-horizontal; + + border-radius: 18px; + + @include font-body-1-italic; + + @include light-theme { + color: $color-gray-90; + border: 1px solid $color-gray-05; + background-color: $color-white; + background-image: none; + } + + @include dark-theme { + color: $color-gray-05; + border: 1px solid $color-gray-75; + background-color: $color-gray-95; + background-image: none; + } +} + +.module-message__attachment-too-big--content-above { + border-top-left-radius: 0; + border-top-right-radius: 0; +} +.module-message__attachment-too-big--content-below { + border-bottom-left-radius: 0; + border-bottom-right-radius: 0; + + border-bottom: none; + margin-bottom: 7px; +} + +.module-message__attachment-too-big--collapse-above--incoming { + border-top-left-radius: 4px; +} +.module-message__attachment-too-big--collapse-above--outgoing { + border-top-right-radius: 4px; +} +.module-message__attachment-too-big--collapse-below--incoming { + border-bottom-left-radius: 4px; +} +.module-message__attachment-too-big--collapse-below--outgoing { + border-bottom-right-radius: 4px; +} + .module-message__tap-to-view { margin-top: 2px; display: flex; @@ -1165,7 +1219,7 @@ $message-padding-horizontal: 12px; pointer-events: none; } -.module-message__metadata--deleted-for-everyone { +.module-message__metadata--outline-only-bubble { @include light-theme { color: $color-gray-60; } @@ -1207,7 +1261,7 @@ $message-padding-horizontal: 12px; color: $color-white-alpha-80; } } -.module-message__metadata__date--deleted-for-everyone { +.module-message__metadata__date--outline-only-bubble { @include light-theme { color: $color-gray-60; } @@ -1319,7 +1373,7 @@ $message-padding-horizontal: 12px; } } -.module-message__metadata__status-icon--deleted-for-everyone { +.module-message__metadata__status-icon--outline-only-bubble { @include light-theme { background-color: $color-gray-60; } @@ -1916,7 +1970,7 @@ $timer-icons: '55', '50', '45', '40', '35', '30', '25', '20', '15', '10', '05', } } -.module-expire-timer--deleted-for-everyone { +.module-expire-timer--outline-only-bubble { 
@include light-theme { background-color: $color-gray-60; } @@ -2662,7 +2716,7 @@ button.ConversationDetails__action-button { .module-image__border-overlay--with-border { @include light-theme { - box-shadow: inset 0px 0px 0px 1px $color-black-alpha-20; + box-shadow: inset 0px 0px 0px 1px $color-black-alpha-085; } @include dark-theme { box-shadow: inset 0px 0px 0px 1px $color-white-alpha-20; diff --git a/stylesheets/_variables.scss b/stylesheets/_variables.scss index 2a63fe6be..b5a466f1f 100644 --- a/stylesheets/_variables.scss +++ b/stylesheets/_variables.scss @@ -48,6 +48,8 @@ $color-white-alpha-90: rgba($color-white, 0.9); $color-black-alpha-05: rgba($color-black, 0.05); $color-black-alpha-06: rgba($color-black, 0.06); $color-black-alpha-08: rgba($color-black, 0.08); +// Equivalent to gray-05 on a white background +$color-black-alpha-085: rgba($color-black, 0.085); $color-black-alpha-12: rgba($color-black, 0.12); $color-black-alpha-16: rgba($color-black, 0.16); $color-black-alpha-20: rgba($color-black, 0.2); diff --git a/ts/AttachmentCrypto.ts b/ts/AttachmentCrypto.ts new file mode 100644 index 000000000..9207cbeb1 --- /dev/null +++ b/ts/AttachmentCrypto.ts @@ -0,0 +1,784 @@ +// Copyright 2020 Signal Messenger, LLC +// SPDX-License-Identifier: AGPL-3.0-only + +/* eslint-disable max-classes-per-file */ + +import { + existsSync, + createReadStream, + createWriteStream, + unlinkSync, +} from 'fs'; +import { + createDecipheriv, + createCipheriv, + createHash, + createHmac, +} from 'crypto'; +import type { Cipher, Decipher, Hash, Hmac } from 'crypto'; +import { ensureFile } from 'fs-extra'; +import { Transform } from 'stream'; +import { pipeline } from 'stream/promises'; + +import * as log from './logging/log'; +import * as Errors from './types/errors'; +import { HashType, CipherType } from './types/Crypto'; + +import { createName, getRelativePath } from './windows/attachments'; +import { + constantTimeEqual, + getAttachmentSizeBucket, + getRandomBytes, + getZeroes, +} from './Crypto'; +import { Environment } from './environment'; + +// This file was split from ts/Crypto.ts because it pulls things in from node, and +// too many things pull in Crypto.ts, so it broke storybook. 
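+// The transforms below stream out the layout that the in-memory attachment helpers in
+// ts/Crypto.ts produce: a 16-byte iv, then the AES-256-CBC ciphertext of the padded
+// plaintext, then a 32-byte HMAC-SHA256 over (iv | ciphertext); the digest is a SHA-256
+// over the entire result. A rough, non-streaming sketch of that layout for orientation.
+// The name and signature here are illustrative only; it leans on the 'crypto' imports above.
+function sketchOfCiphertextLayout(
+  paddedPlaintext: Buffer,
+  aesKey: Buffer,
+  macKey: Buffer,
+  iv: Buffer
+): { file: Buffer; digest: Buffer } {
+  const cipher = createCipheriv(CipherType.AES256CBC, aesKey, iv);
+  // iv | ciphertext (CBC adds PKCS#7 padding on top of the bucket padding)
+  const encrypted = Buffer.concat([
+    iv,
+    cipher.update(paddedPlaintext),
+    cipher.final(),
+  ]);
+  // The mac covers iv | ciphertext and is appended as the final 32 bytes
+  const mac = createHmac('sha256', macKey).update(encrypted).digest();
+  const file = Buffer.concat([encrypted, mac]);
+  // The digest covers everything, including the mac
+  const digest = createHash('sha256').update(file).digest();
+  return { file, digest };
+}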
+ +export const IV_LENGTH = 16; +export const KEY_LENGTH = 32; +export const ATTACHMENT_MAC_LENGTH = 32; + +export type EncryptedAttachmentV2 = { + path: string; + digest: Uint8Array; +}; + +export async function encryptAttachmentV2({ + keys, + plaintextAbsolutePath, + size, + dangerousTestOnlyIv, +}: { + keys: Readonly; + plaintextAbsolutePath: string; + size: number; + dangerousTestOnlyIv?: Readonly; +}): Promise { + const logId = 'encryptAttachmentV2'; + if (keys.byteLength !== KEY_LENGTH * 2) { + throw new Error(`${logId}: Got invalid length attachment keys`); + } + if (!existsSync(plaintextAbsolutePath)) { + throw new Error(`${logId}: Target path doesn't exist!`); + } + + // Create random output file + const relativeTargetPath = getRelativePath(createName()); + const absoluteTargetPath = + window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath); + await ensureFile(absoluteTargetPath); + + // Create start and end streams + const readStream = createReadStream(plaintextAbsolutePath); + const writeStream = createWriteStream(absoluteTargetPath); + + const aesKey = keys.slice(0, KEY_LENGTH); + const macKey = keys.slice(KEY_LENGTH, KEY_LENGTH * 2); + + if (dangerousTestOnlyIv && window.getEnvironment() !== Environment.Test) { + throw new Error(`${logId}: Used dangerousTestOnlyIv outside tests!`); + } + const iv = dangerousTestOnlyIv || getRandomBytes(16); + + const addPaddingTransform = new AddPaddingTransform(size); + const cipherTransform = new CipherTransform(iv, aesKey); + const addIvTransform = new AddIvTransform(iv); + const addMacTransform = new AddMacTransform(macKey); + const digestTransform = new DigestTransform(); + + try { + await pipeline( + readStream, + addPaddingTransform, + cipherTransform, + addIvTransform, + addMacTransform, + digestTransform, + writeStream + ); + } catch (error) { + try { + readStream.close(); + writeStream.close(); + } catch (cleanupError) { + log.error( + `${logId}: Failed to clean up after error`, + Errors.toLogFormat(cleanupError) + ); + } + + if (existsSync(absoluteTargetPath)) { + unlinkSync(absoluteTargetPath); + } + + throw error; + } + + const { ourDigest } = digestTransform; + if (!ourDigest || !ourDigest.byteLength) { + throw new Error(`${logId}: Failed to generate ourDigest!`); + } + + writeStream.close(); + readStream.close(); + + return { + path: relativeTargetPath, + digest: ourDigest, + }; +} + +export async function decryptAttachmentV2({ + ciphertextPath, + id, + keys, + size, + theirDigest, +}: { + ciphertextPath: string; + id: string; + keys: Readonly; + size: number; + theirDigest: Readonly; +}): Promise { + const logId = `decryptAttachmentV2(${id})`; + if (keys.byteLength !== KEY_LENGTH * 2) { + throw new Error(`${logId}: Got invalid length attachment keys`); + } + if (!existsSync(ciphertextPath)) { + throw new Error(`${logId}: Target path doesn't exist!`); + } + + // Create random output file + const relativeTargetPath = getRelativePath(createName()); + const absoluteTargetPath = + window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath); + await ensureFile(absoluteTargetPath); + + // Create start and end streams + const readStream = createReadStream(ciphertextPath); + const writeStream = createWriteStream(absoluteTargetPath); + + const aesKey = keys.slice(0, KEY_LENGTH); + const macKey = keys.slice(KEY_LENGTH, KEY_LENGTH * 2); + + const digestTransform = new DigestTransform(); + const macTransform = new MacTransform(macKey); + const decipherTransform = new DecipherTransform(aesKey); + const 
coreDecryptionTransform = new CoreDecryptionTransform( + decipherTransform + ); + const limitLengthTransform = new LimitLengthTransform(size); + + try { + await pipeline( + readStream, + digestTransform, + macTransform, + coreDecryptionTransform, + decipherTransform, + limitLengthTransform, + writeStream + ); + } catch (error) { + try { + readStream.close(); + writeStream.close(); + } catch (cleanupError) { + log.error( + `${logId}: Failed to clean up after error`, + Errors.toLogFormat(cleanupError) + ); + } + + if (existsSync(absoluteTargetPath)) { + unlinkSync(absoluteTargetPath); + } + + throw error; + } + + const { ourMac } = macTransform; + const { theirMac } = coreDecryptionTransform; + if (!ourMac || !ourMac.byteLength) { + throw new Error(`${logId}: Failed to generate ourMac!`); + } + if (!theirMac || !theirMac.byteLength) { + throw new Error(`${logId}: Failed to find theirMac!`); + } + if (!constantTimeEqual(ourMac, theirMac)) { + throw new Error(`${logId}: Bad MAC`); + } + + const { ourDigest } = digestTransform; + if (!ourDigest || !ourDigest.byteLength) { + throw new Error(`${logId}: Failed to generate ourDigest!`); + } + if (!constantTimeEqual(ourDigest, theirDigest)) { + throw new Error(`${logId}: Bad digest`); + } + + writeStream.close(); + readStream.close(); + + return relativeTargetPath; +} + +// A very simple transform that doesn't modify the stream, but does calculate a digest +// across all data it gets. +class DigestTransform extends Transform { + private digestBuilder: Hash; + public ourDigest: Uint8Array | undefined; + + constructor() { + super(); + this.digestBuilder = createHash(HashType.size256); + } + + override _flush(done: (error?: Error) => void) { + try { + this.ourDigest = this.digestBuilder.digest(); + } catch (error) { + done(error); + return; + } + + done(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + this.digestBuilder.update(chunk); + this.push(chunk); + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// A more complex transform that also doesn't modify the stream, calculating an HMAC +// across everything but the last bytes of the stream. +class MacTransform extends Transform { + public ourMac: Uint8Array | undefined; + private macBuilder: Hmac; + private lastBytes: Uint8Array | undefined; + + constructor(macKey: Uint8Array) { + super(); + + if (macKey.byteLength !== KEY_LENGTH) { + throw new Error( + `MacTransform: macKey should be ${KEY_LENGTH} bytes, got ${macKey.byteLength} bytes` + ); + } + + this.macBuilder = createHmac('sha256', Buffer.from(macKey)); + } + + override _flush(done: (error?: Error) => void) { + try { + this.ourMac = this.macBuilder.digest(); + } catch (error) { + done(error); + return; + } + + done(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + // We'll continue building up data if our chunk sizes are too small to fit MAC + const data = this.lastBytes + ? 
Buffer.concat([this.lastBytes, chunk]) + : chunk; + + // Compute new last bytes from this chunk + const lastBytesIndex = Math.max( + 0, + data.byteLength - ATTACHMENT_MAC_LENGTH + ); + this.lastBytes = data.subarray(lastBytesIndex); + + // Update hmac with data we know is not the last bytes + if (lastBytesIndex > 0) { + this.macBuilder.update(data.subarray(0, lastBytesIndex)); + } + + this.push(chunk); + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// The core of the decryption algorithm - it grabs the iv and initializes the +// DecipherTransform provided to it. It also modifies the stream, only passing on the +// data between the iv and the mac at the end. +class CoreDecryptionTransform extends Transform { + private lastBytes: Uint8Array | undefined; + + public iv: Uint8Array | undefined; + public theirMac: Uint8Array | undefined; + + constructor(private decipherTransform: DecipherTransform) { + super(); + } + + override _flush(done: (error?: Error) => void) { + try { + if ( + !this.lastBytes || + this.lastBytes.byteLength !== ATTACHMENT_MAC_LENGTH + ) { + throw new Error( + `CoreDecryptionTransform: didn't get expected ${ATTACHMENT_MAC_LENGTH} bytes for mac, got ${this.lastBytes?.byteLength}!` + ); + } + + this.theirMac = this.lastBytes; + } catch (error) { + done(error); + return; + } + + done(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + let data = chunk; + + // Grab the first bytes from data if we haven't already + if (!this.iv) { + this.iv = chunk.subarray(0, IV_LENGTH); + data = chunk.subarray(IV_LENGTH); + + if (this.iv.byteLength !== IV_LENGTH) { + throw new Error( + `CoreDecryptionTransform: didn't get expected ${IV_LENGTH} bytes for iv, got ${this.iv.byteLength}!` + ); + } + + this.decipherTransform.initializeDecipher(this.iv); + } + + // Add previous last bytes to this new chunk + if (this.lastBytes) { + data = Buffer.concat([this.lastBytes, data]); + } + + // Compute new last bytes from this chunk - if this chunk doesn't fit the MAC, we + // build across multiple chunks to get there. + const macIndex = Math.max(0, data.byteLength - ATTACHMENT_MAC_LENGTH); + this.lastBytes = data.subarray(macIndex); + + if (macIndex > 0) { + this.push(data.subarray(0, macIndex)); + } + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// The transform that does the actual deciphering. It doesn't have enough information to +// start working until the first chunk is processed upstream, hence its public +// initializeDecipher() function. 
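+// (For orientation: the pipeline assembled in decryptAttachmentV2 undoes the layout
+// produced by encryptAttachmentV2. CoreDecryptionTransform strips the leading 16-byte iv
+// and holds back the trailing 32 bytes as theirMac; MacTransform and DigestTransform
+// recompute ourMac and ourDigest for the constant-time comparisons; LimitLengthTransform
+// trims the zero padding back down to the caller-provided size. Because the iv only
+// arrives with the first ciphertext chunk, this transform can't create its decipher in
+// the constructor.)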
+class DecipherTransform extends Transform { + private decipher: Decipher | undefined; + + constructor(private aesKey: Uint8Array) { + super(); + + if (aesKey.byteLength !== KEY_LENGTH) { + throw new Error( + `DecipherTransform: aesKey should be ${KEY_LENGTH} bytes, got ${aesKey.byteLength} bytes` + ); + } + } + + public initializeDecipher(iv: Uint8Array) { + if (iv.byteLength !== IV_LENGTH) { + throw new Error( + `DecipherTransform: iv should be ${IV_LENGTH} bytes, got ${iv.byteLength} bytes` + ); + } + + this.decipher = createDecipheriv( + CipherType.AES256CBC, + Buffer.from(this.aesKey), + Buffer.from(iv) + ); + } + + override _flush(done: (error?: Error) => void) { + if (!this.decipher) { + done( + new Error( + "DecipherTransform: _flush called, but decipher isn't initialized" + ) + ); + return; + } + + try { + this.push(this.decipher.final()); + } catch (error) { + done(error); + return; + } + + done(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!this.decipher) { + done( + new Error( + "DecipherTransform: got a chunk, but decipher isn't initialized" + ) + ); + return; + } + + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + this.push(this.decipher.update(chunk)); + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// A simple transform that limits the provided data to `size` bytes. We use this to +// discard the padding on the incoming plaintext data. +class LimitLengthTransform extends Transform { + private bytesWritten = 0; + + constructor(private size: number) { + super(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + const chunkLength = chunk.byteLength; + const sizeLeft = this.size - this.bytesWritten; + + if (sizeLeft >= chunkLength) { + this.bytesWritten += chunkLength; + this.push(chunk); + } else if (sizeLeft > 0) { + this.bytesWritten += sizeLeft; + this.push(chunk.subarray(0, sizeLeft)); + } + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// This is an unusual transform, in that it can produce quite a bit more data than it is +// provided. That's because it computes a bucket size for the provided size, which may +// be quite a bit bigger than the attachment, and then needs to provide those zeroes +// at the end of the stream. 
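+// (getAttachmentSizeBucket lives in ts/Crypto.ts and rounds the plaintext size up to the
+// next entry on an exponential ladder of bucket sizes, so the padding written here can be
+// sizeable; the ghost-kitty.mp4 fixture in ts/test-electron/Crypto_test.ts picks up
+// 126,066 zero bytes, for example. Roughly, as a sketch of the idea only:
+//
+//   const targetLength = getAttachmentSizeBucket(size); // next bucket >= size
+//   const paddingNeeded = targetLength - size; // emitted below as zero-filled chunks
+//
+// The receiving side trims this padding back off with LimitLengthTransform.)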
+const PADDING_CHUNK_SIZE = 64 * 1024; +class AddPaddingTransform extends Transform { + private bytesWritten = 0; + private targetLength: number; + private paddingChunksToWrite: Array = []; + private paddingCallback: ((error?: Error) => void) | undefined; + + constructor(private size: number) { + super(); + this.targetLength = getAttachmentSizeBucket(size); + } + + override _read(size: number): void { + if (this.paddingChunksToWrite.length > 0) { + // Restart our efforts to push padding downstream + this.pushPaddingChunks(); + } else { + Transform.prototype._read.call(this, size); + } + } + + public pushPaddingChunks(): boolean { + while (this.paddingChunksToWrite.length > 0) { + const [first, ...rest] = this.paddingChunksToWrite; + this.paddingChunksToWrite = rest; + + const zeroes = getZeroes(first); + + if (!this.push(zeroes)) { + // We shouldn't push any more; if we have more to push, we'll do it after a read() + break; + } + } + + if (this.paddingChunksToWrite.length > 0) { + return false; + } + + this.paddingCallback?.(); + return true; + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + try { + const chunkLength = chunk.byteLength; + const contentsStillNeeded = this.size - this.bytesWritten; + + if (contentsStillNeeded >= chunkLength) { + this.push(chunk); + this.bytesWritten += chunkLength; + } else if (contentsStillNeeded > 0) { + throw new Error( + `AddPaddingTransform: chunk length was ${chunkLength} but only ${contentsStillNeeded} bytes needed to get to size ${this.size}` + ); + } + + if (this.bytesWritten === this.size) { + const paddingNeeded = this.targetLength - this.size; + const chunks = Math.floor(paddingNeeded / PADDING_CHUNK_SIZE); + const remainder = paddingNeeded % PADDING_CHUNK_SIZE; + + for (let i = 0; i < chunks; i += 1) { + this.paddingChunksToWrite.push(PADDING_CHUNK_SIZE); + } + if (remainder > 0) { + this.paddingChunksToWrite.push(remainder); + } + + if (!this.pushPaddingChunks()) { + // If we didn't push all chunks, we shouldn't call done - we'll keep it around + // to call when we're actually done. + this.paddingCallback = done; + return; + } + } + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// The transform that does the actual ciphering; quite simple in that it applies the +// cipher to all incoming data, and can initialize itself fully in its constructor. 
+class CipherTransform extends Transform { + private cipher: Cipher; + + constructor(private iv: Uint8Array, private aesKey: Uint8Array) { + super(); + + if (aesKey.byteLength !== KEY_LENGTH) { + throw new Error( + `CipherTransform: aesKey should be ${KEY_LENGTH} bytes, got ${aesKey.byteLength} bytes` + ); + } + if (iv.byteLength !== IV_LENGTH) { + throw new Error( + `CipherTransform: iv should be ${IV_LENGTH} bytes, got ${iv.byteLength} bytes` + ); + } + + this.cipher = createCipheriv( + CipherType.AES256CBC, + Buffer.from(this.aesKey), + Buffer.from(this.iv) + ); + } + + override _flush(done: (error?: Error) => void) { + try { + this.push(this.cipher.final()); + } catch (error) { + done(error); + return; + } + + done(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + this.push(this.cipher.update(chunk)); + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// This very simple transform adds the provided iv data to the beginning of the stream. +class AddIvTransform extends Transform { + public haveAddedIv = false; + + constructor(private iv: Uint8Array) { + super(); + + if (iv.byteLength !== IV_LENGTH) { + throw new Error( + `MacTransform: iv should be ${IV_LENGTH} bytes, got ${iv.byteLength} bytes` + ); + } + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + if (!this.haveAddedIv) { + this.push(this.iv); + this.haveAddedIv = true; + } + this.push(chunk); + } catch (error) { + done(error); + return; + } + + done(); + } +} + +// This transform both calculates the mac and adds it to the end of the stream. 
+class AddMacTransform extends Transform { + public ourMac: Uint8Array | undefined; + private macBuilder: Hmac; + + constructor(macKey: Uint8Array) { + super(); + + if (macKey.byteLength !== KEY_LENGTH) { + throw new Error( + `MacTransform: macKey should be ${KEY_LENGTH} bytes, got ${macKey.byteLength} bytes` + ); + } + + this.macBuilder = createHmac('sha256', Buffer.from(macKey)); + } + + override _flush(done: (error?: Error) => void) { + try { + this.ourMac = this.macBuilder.digest(); + this.push(this.ourMac); + } catch (error) { + done(error); + return; + } + + done(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + this.macBuilder.update(chunk); + this.push(chunk); + } catch (error) { + done(error); + return; + } + + done(); + } +} diff --git a/ts/Crypto.ts b/ts/Crypto.ts index 6a0829234..9747cf422 100644 --- a/ts/Crypto.ts +++ b/ts/Crypto.ts @@ -10,6 +10,7 @@ import { calculateAgreement, generateKeyPair } from './Curve'; import { HashType, CipherType } from './types/Crypto'; import { ProfileDecryptError } from './types/errors'; import { getBytesSubarray } from './util/uuidToBytes'; +import { Environment } from './environment'; export { HashType, CipherType }; @@ -173,8 +174,8 @@ export function verifyAccessKey( } const IV_LENGTH = 16; -const MAC_LENGTH = 16; const NONCE_LENGTH = 16; +const SYMMETRIC_MAC_LENGTH = 16; export function encryptSymmetric( key: Uint8Array, @@ -187,7 +188,10 @@ export function encryptSymmetric( const macKey = hmacSha256(key, cipherKey); const ciphertext = encryptAes256CbcPkcsPadding(cipherKey, plaintext, iv); - const mac = getFirstBytes(hmacSha256(macKey, ciphertext), MAC_LENGTH); + const mac = getFirstBytes( + hmacSha256(macKey, ciphertext), + SYMMETRIC_MAC_LENGTH + ); return Bytes.concatenate([nonce, ciphertext, mac]); } @@ -202,18 +206,21 @@ export function decryptSymmetric( const ciphertext = getBytesSubarray( data, NONCE_LENGTH, - data.byteLength - NONCE_LENGTH - MAC_LENGTH + data.byteLength - NONCE_LENGTH - SYMMETRIC_MAC_LENGTH ); const theirMac = getBytesSubarray( data, - data.byteLength - MAC_LENGTH, - MAC_LENGTH + data.byteLength - SYMMETRIC_MAC_LENGTH, + SYMMETRIC_MAC_LENGTH ); const cipherKey = hmacSha256(key, nonce); const macKey = hmacSha256(key, cipherKey); - const ourMac = getFirstBytes(hmacSha256(macKey, ciphertext), MAC_LENGTH); + const ourMac = getFirstBytes( + hmacSha256(macKey, ciphertext), + SYMMETRIC_MAC_LENGTH + ); if (!constantTimeEqual(theirMac, ourMac)) { throw new Error( 'decryptSymmetric: Failed to decrypt; MAC verification failed' @@ -379,7 +386,7 @@ function verifyDigest(data: Uint8Array, theirDigest: Uint8Array): void { } } -export function decryptAttachment( +export function decryptAttachmentV1( encryptedBin: Uint8Array, keys: Uint8Array, theirDigest?: Uint8Array @@ -411,20 +418,31 @@ export function decryptAttachment( return decryptAes256CbcPkcsPadding(aesKey, ciphertext, iv); } -export function encryptAttachment( - plaintext: Readonly, - keys: Readonly -): EncryptedAttachment { +export function encryptAttachment({ + plaintext, + keys, + dangerousTestOnlyIv, +}: { + plaintext: Readonly; + keys: Readonly; + dangerousTestOnlyIv?: Readonly; +}): EncryptedAttachment { + const logId = 'encryptAttachment'; if (!(plaintext instanceof Uint8Array)) { throw new TypeError( - `\`plaintext\` must be an \`Uint8Array\`; got: ${typeof plaintext}` + `${logId}: \`plaintext\` must be an 
\`Uint8Array\`; got: ${typeof plaintext}` ); } if (keys.byteLength !== 64) { - throw new Error('Got invalid length attachment keys'); + throw new Error(`${logId}: invalid length attachment keys`); } - const iv = getRandomBytes(16); + + if (dangerousTestOnlyIv && window.getEnvironment() !== Environment.Test) { + throw new Error(`${logId}: Used dangerousTestOnlyIv outside tests!`); + } + + const iv = dangerousTestOnlyIv || getRandomBytes(16); const aesKey = keys.slice(0, 32); const macKey = keys.slice(32, 64); @@ -450,15 +468,24 @@ export function getAttachmentSizeBucket(size: number): number { ); } -export function padAndEncryptAttachment( - data: Readonly, - keys: Readonly -): EncryptedAttachment { - const size = data.byteLength; +export function padAndEncryptAttachment({ + plaintext, + keys, + dangerousTestOnlyIv, +}: { + plaintext: Readonly; + keys: Readonly; + dangerousTestOnlyIv?: Readonly; +}): EncryptedAttachment { + const size = plaintext.byteLength; const paddedSize = getAttachmentSizeBucket(size); const padding = getZeroes(paddedSize - size); - return encryptAttachment(Bytes.concatenate([data, padding]), keys); + return encryptAttachment({ + plaintext: Bytes.concatenate([plaintext, padding]), + keys, + dangerousTestOnlyIv, + }); } export function encryptProfile(data: Uint8Array, key: Uint8Array): Uint8Array { diff --git a/ts/RemoteConfig.ts b/ts/RemoteConfig.ts index 2d5b36833..cdab29fb4 100644 --- a/ts/RemoteConfig.ts +++ b/ts/RemoteConfig.ts @@ -45,6 +45,7 @@ export type ConfigKeyType = | 'desktop.textFormatting' | 'desktop.usernames' | 'global.attachments.maxBytes' + | 'global.attachments.maxReceiveBytes' | 'global.calling.maxGroupCallRingSize' | 'global.groupsv2.groupSizeHardLimit' | 'global.groupsv2.maxGroupSize' diff --git a/ts/components/conversation/ExpireTimer.tsx b/ts/components/conversation/ExpireTimer.tsx index 623fad8ee..be92f7e0b 100644 --- a/ts/components/conversation/ExpireTimer.tsx +++ b/ts/components/conversation/ExpireTimer.tsx @@ -7,20 +7,20 @@ import classNames from 'classnames'; import { getIncrement, getTimerBucket } from '../../util/timer'; export type Props = { - deletedForEveryone?: boolean; direction?: 'incoming' | 'outgoing'; expirationLength: number; expirationTimestamp?: number; + isOutlineOnlyBubble?: boolean; withImageNoCaption?: boolean; withSticker?: boolean; withTapToViewExpired?: boolean; }; export function ExpireTimer({ - deletedForEveryone, direction, expirationLength, expirationTimestamp, + isOutlineOnlyBubble, withImageNoCaption, withSticker, withTapToViewExpired, @@ -44,7 +44,7 @@ export function ExpireTimer({ 'module-expire-timer', `module-expire-timer--${bucket}`, direction ? `module-expire-timer--${direction}` : null, - deletedForEveryone ? 'module-expire-timer--deleted-for-everyone' : null, + isOutlineOnlyBubble ? 'module-expire-timer--outline-only-bubble' : null, withTapToViewExpired ? 
`module-expire-timer--${direction}-with-tap-to-view-expired` : null, diff --git a/ts/components/conversation/ImageGrid.tsx b/ts/components/conversation/ImageGrid.tsx index 1a73ffe2b..cf61d8f65 100644 --- a/ts/components/conversation/ImageGrid.tsx +++ b/ts/components/conversation/ImageGrid.tsx @@ -75,13 +75,16 @@ function getCurves({ curveTopRight = CurveType.Normal; } - if (shouldCollapseBelow && direction === 'incoming') { + if (withContentBelow) { + curveBottomLeft = CurveType.None; + curveBottomRight = CurveType.None; + } else if (shouldCollapseBelow && direction === 'incoming') { curveBottomLeft = CurveType.Tiny; curveBottomRight = CurveType.None; } else if (shouldCollapseBelow && direction === 'outgoing') { curveBottomLeft = CurveType.None; curveBottomRight = CurveType.Tiny; - } else if (!withContentBelow) { + } else { curveBottomLeft = CurveType.Normal; curveBottomRight = CurveType.Normal; } diff --git a/ts/components/conversation/Message.tsx b/ts/components/conversation/Message.tsx index ece9902f7..8fa9bc3a4 100644 --- a/ts/components/conversation/Message.tsx +++ b/ts/components/conversation/Message.tsx @@ -284,6 +284,7 @@ export type PropsData = { reactions?: ReactionViewerProps['reactions']; deletedForEveryone?: boolean; + attachmentDroppedDueToSize?: boolean; canDeleteForEveryone: boolean; isBlocked: boolean; @@ -565,6 +566,7 @@ export class Message extends React.PureComponent { private getMetadataPlacement( { attachments, + attachmentDroppedDueToSize, deletedForEveryone, direction, expirationLength, @@ -599,12 +601,16 @@ export class Message extends React.PureComponent { return MetadataPlacement.Bottom; } - if (!text && !deletedForEveryone) { + if (!text && !deletedForEveryone && !attachmentDroppedDueToSize) { return isAudio(attachments) ? 
MetadataPlacement.RenderedByMessageAudioComponent : MetadataPlacement.Bottom; } + if (!text && attachmentDroppedDueToSize) { + return MetadataPlacement.InlineWithText; + } + if (this.canRenderStickerLikeEmoji()) { return MetadataPlacement.Bottom; } @@ -796,6 +802,7 @@ export class Message extends React.PureComponent { } const { + attachmentDroppedDueToSize, deletedForEveryone, direction, expirationLength, @@ -822,11 +829,14 @@ export class Message extends React.PureComponent { direction={direction} expirationLength={expirationLength} expirationTimestamp={expirationTimestamp} - hasText={Boolean(text)} + hasText={Boolean(text || attachmentDroppedDueToSize)} i18n={i18n} id={id} isEditedMessage={isEditedMessage} isInline={isInline} + isOutlineOnlyBubble={ + deletedForEveryone || (attachmentDroppedDueToSize && !text) + } isShowingImage={this.isShowingImage()} isSticker={isStickerLike} isTapToViewExpired={isTapToViewExpired} @@ -878,6 +888,7 @@ export class Message extends React.PureComponent { public renderAttachment(): JSX.Element | null { const { attachments, + attachmentDroppedDueToSize, conversationId, direction, expirationLength, @@ -912,7 +923,7 @@ export class Message extends React.PureComponent { const firstAttachment = attachments[0]; // For attachments which aren't full-frame - const withContentBelow = Boolean(text); + const withContentBelow = Boolean(text || attachmentDroppedDueToSize); const withContentAbove = Boolean(quote) || this.shouldRenderAuthor(); const displayImage = canDisplayImage(attachments); @@ -1274,6 +1285,62 @@ export class Message extends React.PureComponent { ); } + public renderAttachmentTooBig(): JSX.Element | null { + const { + attachments, + attachmentDroppedDueToSize, + direction, + i18n, + quote, + shouldCollapseAbove, + shouldCollapseBelow, + text, + } = this.props; + const { metadataWidth } = this.state; + + if (!attachmentDroppedDueToSize) { + return null; + } + + const labelText = attachments?.length + ? i18n('icu:message--attachmentTooBig--multiple') + : i18n('icu:message--attachmentTooBig--one'); + + const isContentAbove = quote || attachments?.length; + const isContentBelow = Boolean(text); + const willCollapseAbove = shouldCollapseAbove && !isContentAbove; + const willCollapseBelow = shouldCollapseBelow && !isContentBelow; + + const maybeSpacer = text + ? undefined + : this.getMetadataPlacement() === MetadataPlacement.InlineWithText && ( + + ); + + return ( +
+ {labelText} + {maybeSpacer} + </div>
+ ); + } + public renderGiftBadge(): JSX.Element | null { const { conversationTitle, direction, getPreferredBadge, giftBadge, i18n } = this.props; @@ -1757,6 +1824,19 @@ export class Message extends React.PureComponent { ); } + private getContents(): string | undefined { + const { deletedForEveryone, direction, i18n, status, text } = this.props; + + if (deletedForEveryone) { + return i18n('icu:message--deletedForEveryone'); + } + if (direction === 'incoming' && status === 'error') { + return i18n('icu:incomingError'); + } + + return text; + } + public renderText(): JSX.Element | null { const { bodyRanges, @@ -1772,17 +1852,12 @@ export class Message extends React.PureComponent { showConversation, showSpoiler, status, - text, + textAttachment, } = this.props; const { metadataWidth } = this.state; - // eslint-disable-next-line no-nested-ternary - const contents = deletedForEveryone - ? i18n('icu:message--deletedForEveryone') - : direction === 'incoming' && status === 'error' - ? i18n('icu:incomingError') - : text; + const contents = this.getContents(); if (!contents) { return null; @@ -2296,7 +2371,7 @@ export class Message extends React.PureComponent { } public renderContents(): JSX.Element | null { - const { giftBadge, isTapToView, deletedForEveryone } = this.props; + const { deletedForEveryone, giftBadge, isTapToView } = this.props; if (deletedForEveryone) { return ( @@ -2326,6 +2401,7 @@ export class Message extends React.PureComponent { {this.renderStoryReplyContext()} {this.renderAttachment()} {this.renderPreview()} + {this.renderAttachmentTooBig()} {this.renderPayment()} {this.renderEmbeddedContact()} {this.renderText()} @@ -2534,6 +2610,7 @@ export class Message extends React.PureComponent { public renderContainer(): JSX.Element { const { attachments, + attachmentDroppedDueToSize, conversationColor, customColor, deletedForEveryone, @@ -2597,7 +2674,12 @@ export class Message extends React.PureComponent { const containerStyles = { width: shouldUseWidth ? 
width : undefined, }; - if (!isStickerLike && !deletedForEveryone && direction === 'outgoing') { + if ( + !isStickerLike && + !deletedForEveryone && + !(attachmentDroppedDueToSize && !text) && + direction === 'outgoing' + ) { Object.assign(containerStyles, getCustomColorStyle(customColor)); } diff --git a/ts/components/conversation/MessageMetadata.tsx b/ts/components/conversation/MessageMetadata.tsx index 6c3cde9e5..26c4c7a6a 100644 --- a/ts/components/conversation/MessageMetadata.tsx +++ b/ts/components/conversation/MessageMetadata.tsx @@ -28,6 +28,7 @@ type PropsType = { id: string; isEditedMessage?: boolean; isInline?: boolean; + isOutlineOnlyBubble?: boolean; isShowingImage: boolean; isSticker?: boolean; isTapToViewExpired?: boolean; @@ -55,6 +56,7 @@ export const MessageMetadata = forwardRef>( i18n, id, isEditedMessage, + isOutlineOnlyBubble, isInline, isShowingImage, isSticker, @@ -136,8 +138,8 @@ export const MessageMetadata = forwardRef>( className={classNames({ 'module-message__metadata__date': true, 'module-message__metadata__date--with-sticker': isSticker, - 'module-message__metadata__date--deleted-for-everyone': - deletedForEveryone, + 'module-message__metadata__date--outline-only-bubble': + isOutlineOnlyBubble, [`module-message__metadata__date--${direction}`]: !isSticker, 'module-message__metadata__date--with-image-no-caption': withImageNoCaption, @@ -149,9 +151,9 @@ export const MessageMetadata = forwardRef>( } else { timestampNode = ( >( 'module-message__metadata', isInline && 'module-message__metadata--inline', withImageNoCaption && 'module-message__metadata--with-image-no-caption', - deletedForEveryone && 'module-message__metadata--deleted-for-everyone' + isOutlineOnlyBubble && 'module-message__metadata--outline-only-bubble' ); const children = ( <> @@ -212,7 +214,7 @@ export const MessageMetadata = forwardRef>( {expirationLength ? ( >( withImageNoCaption ? 'module-message__metadata__status-icon--with-image-no-caption' : null, - deletedForEveryone - ? 'module-message__metadata__status-icon--deleted-for-everyone' + isOutlineOnlyBubble + ? 'module-message__metadata__status-icon--outline-only-bubble' : null, isTapToViewExpired ? 'module-message__metadata__status-icon--with-tap-to-view-expired' diff --git a/ts/components/conversation/MessageTimestamp.tsx b/ts/components/conversation/MessageTimestamp.tsx index c64642baa..5fa1c4465 100644 --- a/ts/components/conversation/MessageTimestamp.tsx +++ b/ts/components/conversation/MessageTimestamp.tsx @@ -12,9 +12,9 @@ import { Time } from '../Time'; import { useNowThatUpdatesEveryMinute } from '../../hooks/useNowThatUpdatesEveryMinute'; export type Props = { - deletedForEveryone?: boolean; direction?: 'incoming' | 'outgoing'; i18n: LocalizerType; + isOutlineOnlyBubble?: boolean; isRelativeTime?: boolean; module?: string; timestamp: number; @@ -24,10 +24,10 @@ export type Props = { }; export function MessageTimestamp({ - deletedForEveryone, direction, i18n, isRelativeTime, + isOutlineOnlyBubble, module, timestamp, withImageNoCaption, @@ -47,7 +47,7 @@ export function MessageTimestamp({ : null, withImageNoCaption ? `${moduleName}--with-image-no-caption` : null, withSticker ? `${moduleName}--with-sticker` : null, - deletedForEveryone ? `${moduleName}--deleted-for-everyone` : null + isOutlineOnlyBubble ? 
`${moduleName}--outline-only-bubble` : null )} timestamp={timestamp} > diff --git a/ts/components/conversation/TimelineMessage.stories.tsx b/ts/components/conversation/TimelineMessage.stories.tsx index 70072d60e..60ec0ef8e 100644 --- a/ts/components/conversation/TimelineMessage.stories.tsx +++ b/ts/components/conversation/TimelineMessage.stories.tsx @@ -244,6 +244,7 @@ const renderAudioAttachment: Props['renderAudioAttachment'] = props => ( const createProps = (overrideProps: Partial = {}): Props => ({ attachments: overrideProps.attachments, + attachmentDroppedDueToSize: overrideProps.attachmentDroppedDueToSize || false, author: overrideProps.author || getDefaultConversation(), bodyRanges: overrideProps.bodyRanges, canCopy: true, @@ -835,6 +836,25 @@ CanDeleteForEveryone.args = { direction: 'outgoing', }; +export function AttachmentTooBig(): JSX.Element { + const propsSent = createProps({ + conversationType: 'direct', + attachmentDroppedDueToSize: true, + }); + + return <>{renderBothDirections(propsSent)}; +} + +export function AttachmentTooBigWithText(): JSX.Element { + const propsSent = createProps({ + conversationType: 'direct', + attachmentDroppedDueToSize: true, + text: 'Check out this file!', + }); + + return <>{renderBothDirections(propsSent)}; +} + export const Error = Template.bind({}); Error.args = { status: 'error', @@ -1233,6 +1253,51 @@ MultipleImages5.args = { status: 'sent', }; +export const MultipleImagesWithOneTooBig = Template.bind({}); +MultipleImagesWithOneTooBig.args = { + attachments: [ + fakeAttachment({ + url: pngUrl, + fileName: 'the-sax.png', + contentType: IMAGE_PNG, + height: 240, + width: 320, + }), + fakeAttachment({ + url: pngUrl, + fileName: 'the-sax.png', + contentType: IMAGE_PNG, + height: 240, + width: 320, + }), + ], + attachmentDroppedDueToSize: true, + status: 'sent', +}; + +export const MultipleImagesWithBodyTextOneTooBig = Template.bind({}); +MultipleImagesWithBodyTextOneTooBig.args = { + attachments: [ + fakeAttachment({ + url: pngUrl, + fileName: 'the-sax.png', + contentType: IMAGE_PNG, + height: 240, + width: 320, + }), + fakeAttachment({ + url: pngUrl, + fileName: 'the-sax.png', + contentType: IMAGE_PNG, + height: 240, + width: 320, + }), + ], + attachmentDroppedDueToSize: true, + text: 'Hey, check out these images!', + status: 'sent', +}; + export const ImageWithCaption = Template.bind({}); ImageWithCaption.args = { attachments: [ @@ -1968,6 +2033,7 @@ PaymentNotification.args = { function MultiSelectMessage() { const [selected, setSelected] = React.useState(false); + return ( void): () => void; onUserInterrupt(reason: string): void; diff --git a/ts/linkPreviews/linkPreviewFetch.ts b/ts/linkPreviews/linkPreviewFetch.ts index 8da77fcf8..1423c551a 100644 --- a/ts/linkPreviews/linkPreviewFetch.ts +++ b/ts/linkPreviews/linkPreviewFetch.ts @@ -612,6 +612,7 @@ export async function fetchLinkPreviewImage( const { blob: xcodedDataBlob } = await scaleImageToLevel( dataBlob, contentType, + dataBlob.size, false ); const xcodedDataArrayBuffer = await blobToArrayBuffer(xcodedDataBlob); diff --git a/ts/messageModifiers/AttachmentDownloads.ts b/ts/messageModifiers/AttachmentDownloads.ts index 3439ce189..22ea453b0 100644 --- a/ts/messageModifiers/AttachmentDownloads.ts +++ b/ts/messageModifiers/AttachmentDownloads.ts @@ -15,12 +15,21 @@ import type { AttachmentDownloadJobTypeType, } from '../sql/Interface'; +import { getValue } from '../RemoteConfig'; import type { MessageModel } from '../models/messages'; import type { AttachmentType } from
'../types/Attachment'; -import { getAttachmentSignature, isDownloaded } from '../types/Attachment'; +import { + AttachmentSizeError, + getAttachmentSignature, + isDownloaded, +} from '../types/Attachment'; import * as Errors from '../types/errors'; import type { LoggerType } from '../types/Logging'; import * as log from '../logging/log'; +import { + KIBIBYTE, + getMaximumIncomingAttachmentSizeInKb, +} from '../types/AttachmentSize'; const { getMessageById, @@ -269,13 +278,40 @@ async function _runJob(job?: AttachmentDownloadJobType): Promise { return; } - await _addAttachmentToMessage( - message, - { ...attachment, pending: true }, - { type, index } - ); + let downloaded: AttachmentType | null = null; - const downloaded = await downloadAttachment(attachment); + try { + const { size } = attachment; + const maxInKib = getMaximumIncomingAttachmentSizeInKb(getValue); + const sizeInKib = size / KIBIBYTE; + if (!size || sizeInKib > maxInKib) { + throw new AttachmentSizeError( + `Attachment Job ${id}: Attachment was ${sizeInKib}kib, max is ${maxInKib}kib` + ); + } + + await _addAttachmentToMessage( + message, + { ...attachment, pending: true }, + { type, index } + ); + + // If the download is bigger than expected, we'll stop in the middle + downloaded = await downloadAttachment(attachment); + } catch (error) { + if (error instanceof AttachmentSizeError) { + log.error(Errors.toLogFormat(error)); + await _addAttachmentToMessage( + message, + _markAttachmentAsTooBig(attachment), + { type, index } + ); + await _finishJob(message, id); + return; + } + + throw error; + } if (!downloaded) { logger.warn( @@ -444,6 +480,14 @@ function _markAttachmentAsPermanentError( }; } +function _markAttachmentAsTooBig(attachment: AttachmentType): AttachmentType { + return { + ...omit(attachment, ['key', 'id']), + error: true, + wasTooBig: true, + }; +} + function _markAttachmentAsTransientError( attachment: AttachmentType ): AttachmentType { diff --git a/ts/model-types.d.ts b/ts/model-types.d.ts index 3dda1acfc..b0320ecb5 100644 --- a/ts/model-types.d.ts +++ b/ts/model-types.d.ts @@ -17,7 +17,7 @@ import type { GroupNameCollisionsWithIdsByTitle } from './util/groupMemberNameCo import type { AttachmentDraftType, AttachmentType } from './types/Attachment'; import type { EmbeddedContactType } from './types/EmbeddedContact'; import { SignalService as Proto } from './protobuf'; -import type { AvatarDataType } from './types/Avatar'; +import type { AvatarDataType, ContactAvatarType } from './types/Avatar'; import type { AciString, PniString, ServiceIdString } from './types/ServiceId'; import type { StoryDistributionIdString } from './types/StoryDistributionId'; import type { SeenStatus } from './MessageSeenStatus'; @@ -331,10 +331,7 @@ export type ConversationAttributesType = { messageRequestResponseType?: number; muteExpiresAt?: number; dontNotifyForMentionsIfMuted?: boolean; - profileAvatar?: null | { - hash: string; - path: string; - }; + profileAvatar?: ContactAvatarType | null; profileKeyCredential?: string | null; profileKeyCredentialExpiration?: number | null; lastProfile?: ConversationLastProfileType; @@ -415,11 +412,7 @@ export type ConversationAttributesType = { addFromInviteLink: AccessRequiredEnum; }; announcementsOnly?: boolean; - avatar?: { - url: string; - path: string; - hash?: string; - } | null; + avatar?: ContactAvatarType | null; avatars?: Array; description?: string; expireTimer?: DurationInSeconds; diff --git a/ts/models/conversations.ts b/ts/models/conversations.ts index f76471f0a..e307cd50c 100644 --- 
a/ts/models/conversations.ts +++ b/ts/models/conversations.ts @@ -4660,8 +4660,8 @@ export class ConversationModel extends window.Backbone if (decrypted) { const newAttributes = await Conversation.maybeUpdateProfileAvatar( this.attributes, - decrypted, { + data: decrypted, writeNewAttachmentData, deleteAttachmentData, doesAttachmentExist, diff --git a/ts/services/contactSync.ts b/ts/services/contactSync.ts index 0d5890a71..87d7a35e8 100644 --- a/ts/services/contactSync.ts +++ b/ts/services/contactSync.ts @@ -4,7 +4,7 @@ import PQueue from 'p-queue'; import type { ContactSyncEvent } from '../textsecure/messageReceiverEvents'; -import type { ModifiedContactDetails } from '../textsecure/ContactsParser'; +import type { ContactDetailsWithAvatar } from '../textsecure/ContactsParser'; import { normalizeAci } from '../util/normalizeAci'; import * as Conversation from '../types/Conversation'; import * as Errors from '../types/errors'; @@ -13,6 +13,7 @@ import type { ConversationModel } from '../models/conversations'; import { validateConversation } from '../util/validateConversation'; import { isDirectConversation, isMe } from '../util/whatTypeOfConversation'; import * as log from '../logging/log'; +import { dropNull } from '../util/dropNull'; // When true - we are running the very first storage and contact sync after // linking. @@ -25,7 +26,7 @@ export function setIsInitialSync(newValue: boolean): void { async function updateConversationFromContactSync( conversation: ConversationModel, - details: ModifiedContactDetails, + details: ContactDetailsWithAvatar, receivedAtCounter: number, sentAt: number ): Promise { @@ -33,17 +34,17 @@ async function updateConversationFromContactSync( window.Signal.Migrations; conversation.set({ - name: details.name, - inbox_position: details.inboxPosition, + name: dropNull(details.name), + inbox_position: dropNull(details.inboxPosition), }); // Update the conversation avatar only if new avatar exists and hash differs const { avatar } = details; - if (avatar && avatar.data) { + if (avatar && avatar.path) { const newAttributes = await Conversation.maybeUpdateAvatar( conversation.attributes, - avatar.data, { + newAvatar: avatar, writeNewAttachmentData, deleteAttachmentData, doesAttachmentExist, diff --git a/ts/state/ducks/composer.ts b/ts/state/ducks/composer.ts index d6fe2c8b5..715fce5fd 100644 --- a/ts/state/ducks/composer.ts +++ b/ts/state/ducks/composer.ts @@ -51,7 +51,7 @@ import { suspendLinkPreviews, } from '../../services/LinkPreview'; import { - getMaximumAttachmentSizeInKb, + getMaximumOutgoingAttachmentSizeInKb, getRenderDetailsForLimit, KIBIBYTE, } from '../../types/AttachmentSize'; @@ -1167,7 +1167,7 @@ function preProcessAttachment( // Putting this after everything else because the other checks are more // important to show to the user. 
- const limitKb = getMaximumAttachmentSizeInKb(getRemoteConfigValue); + const limitKb = getMaximumOutgoingAttachmentSizeInKb(getRemoteConfigValue); if (file.size / KIBIBYTE > limitKb) { return { toastType: ToastType.FileSize, diff --git a/ts/state/selectors/message.ts b/ts/state/selectors/message.ts index e43a44350..c79804cb2 100644 --- a/ts/state/selectors/message.ts +++ b/ts/state/selectors/message.ts @@ -676,6 +676,9 @@ export const getPropsForMessage = ( message: MessageWithUIFieldsType, options: GetPropsForMessageOptions ): Omit => { + const attachmentDroppedDueToSize = message.attachments?.some( + item => item.wasTooBig + ); const attachments = getAttachmentsForMessage(message); const bodyRanges = processBodyRanges(message, options); const author = getAuthorForMessage(message, options); @@ -734,6 +737,7 @@ export const getPropsForMessage = ( return { attachments, + attachmentDroppedDueToSize, author, bodyRanges, previews, diff --git a/ts/state/smart/StoriesTab.tsx b/ts/state/smart/StoriesTab.tsx index 14cc797c0..d63584875 100644 --- a/ts/state/smart/StoriesTab.tsx +++ b/ts/state/smart/StoriesTab.tsx @@ -8,7 +8,7 @@ import type { LocalizerType } from '../../types/Util'; import type { StateType } from '../reducer'; import { SmartStoryCreator } from './StoryCreator'; import { StoriesTab } from '../../components/StoriesTab'; -import { getMaximumAttachmentSizeInKb } from '../../types/AttachmentSize'; +import { getMaximumOutgoingAttachmentSizeInKb } from '../../types/AttachmentSize'; import type { ConfigKeyType } from '../../RemoteConfig'; import { getMe } from '../selectors/conversations'; import { getIntl, getTheme } from '../selectors/user'; @@ -74,7 +74,7 @@ export function SmartStoriesTab(): JSX.Element | null { const otherTabsUnreadStats = useSelector(getOtherTabsUnreadStats); const remoteConfig = useSelector(getRemoteConfig); - const maxAttachmentSizeInKb = getMaximumAttachmentSizeInKb( + const maxAttachmentSizeInKb = getMaximumOutgoingAttachmentSizeInKb( (name: ConfigKeyType) => { const value = remoteConfig[name]?.value; return value ? 
String(value) : undefined; diff --git a/ts/test-both/ContactsParser_test.ts b/ts/test-both/ContactsParser_test.ts deleted file mode 100644 index 3dea042f2..000000000 --- a/ts/test-both/ContactsParser_test.ts +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2015 Signal Messenger, LLC -// SPDX-License-Identifier: AGPL-3.0-only - -import { assert } from 'chai'; -import protobuf from '../protobuf/wrap'; - -import * as Bytes from '../Bytes'; -import { SignalService as Proto } from '../protobuf'; -import { ContactBuffer } from '../textsecure/ContactsParser'; - -const { Writer } = protobuf; - -describe('ContactsParser', () => { - function generateAvatar(): Uint8Array { - const result = new Uint8Array(255); - for (let i = 0; i < result.length; i += 1) { - result[i] = i; - } - return result; - } - - describe('ContactBuffer', () => { - function getTestBuffer(): Uint8Array { - const avatarBuffer = generateAvatar(); - - const contactInfoBuffer = Proto.ContactDetails.encode({ - name: 'Zero Cool', - number: '+10000000000', - aci: '7198E1BD-1293-452A-A098-F982FF201902', - avatar: { contentType: 'image/jpeg', length: avatarBuffer.length }, - }).finish(); - - const writer = new Writer(); - writer.bytes(contactInfoBuffer); - const prefixedContact = writer.finish(); - - const chunks: Array = []; - for (let i = 0; i < 3; i += 1) { - chunks.push(prefixedContact); - chunks.push(avatarBuffer); - } - - return Bytes.concatenate(chunks); - } - - it('parses an array buffer of contacts', () => { - const bytes = getTestBuffer(); - const contactBuffer = new ContactBuffer(bytes); - let contact = contactBuffer.next(); - let count = 0; - while (contact !== undefined) { - count += 1; - assert.strictEqual(contact.name, 'Zero Cool'); - assert.strictEqual(contact.number, '+10000000000'); - assert.strictEqual(contact.aci, '7198e1bd-1293-452a-a098-f982ff201902'); - assert.strictEqual(contact.avatar?.contentType, 'image/jpeg'); - assert.strictEqual(contact.avatar?.length, 255); - assert.strictEqual(contact.avatar?.data.byteLength, 255); - const avatarBytes = new Uint8Array( - contact.avatar?.data || new Uint8Array(0) - ); - for (let j = 0; j < 255; j += 1) { - assert.strictEqual(avatarBytes[j], j); - } - contact = contactBuffer.next(); - } - assert.strictEqual(count, 3); - }); - }); -}); diff --git a/ts/test-both/helpers/fakeAttachment.ts b/ts/test-both/helpers/fakeAttachment.ts index 49e0a6b81..1178392e0 100644 --- a/ts/test-both/helpers/fakeAttachment.ts +++ b/ts/test-both/helpers/fakeAttachment.ts @@ -15,6 +15,8 @@ export const fakeAttachment = ( width: 800, height: 600, size: 10304, + // This is to get rid of the download buttons on most of our stories + path: 'ab/ablahblahblah', ...overrides, }); diff --git a/ts/test-electron/ContactsParser_test.ts b/ts/test-electron/ContactsParser_test.ts new file mode 100644 index 000000000..be6a3158a --- /dev/null +++ b/ts/test-electron/ContactsParser_test.ts @@ -0,0 +1,277 @@ +// Copyright 2015 Signal Messenger, LLC +// SPDX-License-Identifier: AGPL-3.0-only + +import { assert } from 'chai'; +import { createReadStream, readFileSync, unlinkSync, writeFileSync } from 'fs'; +import { v4 as generateGuid } from 'uuid'; +import { join } from 'path'; +import { pipeline } from 'stream/promises'; +import { Transform } from 'stream'; + +import protobuf from '../protobuf/wrap'; +import * as log from '../logging/log'; +import * as Bytes from '../Bytes'; +import * as Errors from '../types/errors'; +import { SignalService as Proto } from '../protobuf'; +import { + ParseContactsTransform, + 
parseContactsV2, +} from '../textsecure/ContactsParser'; +import type { ContactDetailsWithAvatar } from '../textsecure/ContactsParser'; +import { createTempDir, deleteTempDir } from '../updater/common'; +import { strictAssert } from '../util/assert'; + +const { Writer } = protobuf; + +describe('ContactsParser', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = await createTempDir(); + }); + afterEach(async () => { + await deleteTempDir(log, tempDir); + }); + + describe('parseContactsV2', () => { + it('parses an array buffer of contacts', async () => { + let absolutePath: string | undefined; + + try { + const bytes = getTestBuffer(); + const fileName = generateGuid(); + absolutePath = join(tempDir, fileName); + writeFileSync(absolutePath, bytes); + + const contacts = await parseContactsV2({ absolutePath }); + assert.strictEqual(contacts.length, 3); + + contacts.forEach(contact => { + verifyContact(contact); + }); + } finally { + if (absolutePath) { + unlinkSync(absolutePath); + } + } + }); + + it('parses an array buffer of contacts with small chunk size', async () => { + let absolutePath: string | undefined; + + try { + const bytes = getTestBuffer(); + const fileName = generateGuid(); + absolutePath = join(tempDir, fileName); + writeFileSync(absolutePath, bytes); + + const contacts = await parseContactsWithSmallChunkSize({ + absolutePath, + }); + assert.strictEqual(contacts.length, 3); + + contacts.forEach(contact => { + verifyContact(contact); + }); + } finally { + if (absolutePath) { + unlinkSync(absolutePath); + } + } + }); + + it('parses an array buffer of contacts where one contact has no avatar', async () => { + let absolutePath: string | undefined; + + try { + const bytes = Bytes.concatenate([ + generatePrefixedContact(undefined), + getTestBuffer(), + ]); + + const fileName = generateGuid(); + absolutePath = join(tempDir, fileName); + writeFileSync(absolutePath, bytes); + + const contacts = await parseContactsWithSmallChunkSize({ + absolutePath, + }); + assert.strictEqual(contacts.length, 4); + + contacts.forEach((contact, index) => { + const avatarIsMissing = index === 0; + verifyContact(contact, avatarIsMissing); + }); + } finally { + if (absolutePath) { + unlinkSync(absolutePath); + } + } + }); + + it('parses an array buffer of contacts where contacts are dropped due to missing ACI', async () => { + let absolutePath: string | undefined; + + try { + const avatarBuffer = generateAvatar(); + const bytes = Bytes.concatenate([ + generatePrefixedContact(avatarBuffer, 'invalid'), + avatarBuffer, + generatePrefixedContact(undefined, 'invalid'), + getTestBuffer(), + ]); + + const fileName = generateGuid(); + absolutePath = join(tempDir, fileName); + writeFileSync(absolutePath, bytes); + + const contacts = await parseContactsWithSmallChunkSize({ + absolutePath, + }); + assert.strictEqual(contacts.length, 3); + + contacts.forEach(contact => { + verifyContact(contact); + }); + } finally { + if (absolutePath) { + unlinkSync(absolutePath); + } + } + }); + }); +}); + +class SmallChunksTransform extends Transform { + constructor(private chunkSize: number) { + super(); + } + + override _transform( + incomingChunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!incomingChunk || incomingChunk.byteLength === 0) { + done(); + return; + } + + try { + const totalSize = incomingChunk.byteLength; + + const chunkCount = Math.floor(totalSize / this.chunkSize); + const remainder = totalSize % this.chunkSize; + + for (let i = 0; i < chunkCount; i += 
1) { + const start = i * this.chunkSize; + const end = start + this.chunkSize; + this.push(incomingChunk.subarray(start, end)); + } + if (remainder > 0) { + this.push(incomingChunk.subarray(chunkCount * this.chunkSize)); + } + } catch (error) { + done(error); + return; + } + + done(); + } +} + +function generateAvatar(): Uint8Array { + const result = new Uint8Array(255); + for (let i = 0; i < result.length; i += 1) { + result[i] = i; + } + return result; +} + +function getTestBuffer(): Uint8Array { + const avatarBuffer = generateAvatar(); + const prefixedContact = generatePrefixedContact(avatarBuffer); + + const chunks: Array = []; + for (let i = 0; i < 3; i += 1) { + chunks.push(prefixedContact); + chunks.push(avatarBuffer); + } + + return Bytes.concatenate(chunks); +} + +function generatePrefixedContact( + avatarBuffer: Uint8Array | undefined, + aci = '7198E1BD-1293-452A-A098-F982FF201902' +) { + const contactInfoBuffer = Proto.ContactDetails.encode({ + name: 'Zero Cool', + number: '+10000000000', + aci, + avatar: avatarBuffer + ? { contentType: 'image/jpeg', length: avatarBuffer.length } + : undefined, + }).finish(); + + const writer = new Writer(); + writer.bytes(contactInfoBuffer); + const prefixedContact = writer.finish(); + return prefixedContact; +} + +function verifyContact( + contact: ContactDetailsWithAvatar, + avatarIsMissing?: boolean +) { + assert.strictEqual(contact.name, 'Zero Cool'); + assert.strictEqual(contact.number, '+10000000000'); + assert.strictEqual(contact.aci, '7198e1bd-1293-452a-a098-f982ff201902'); + + if (avatarIsMissing) { + return; + } + + const path = contact.avatar?.path; + strictAssert(path, 'Avatar needs path'); + + const absoluteAttachmentPath = + window.Signal.Migrations.getAbsoluteAttachmentPath(path); + const avatarBytes = readFileSync(absoluteAttachmentPath); + unlinkSync(absoluteAttachmentPath); + + for (let j = 0; j < 255; j += 1) { + assert.strictEqual(avatarBytes[j], j); + } +} + +async function parseContactsWithSmallChunkSize({ + absolutePath, +}: { + absolutePath: string; +}): Promise> { + const logId = 'parseContactsWithSmallChunkSize'; + + const readStream = createReadStream(absolutePath); + const smallChunksTransform = new SmallChunksTransform(32); + const parseContactsTransform = new ParseContactsTransform(); + + try { + await pipeline(readStream, smallChunksTransform, parseContactsTransform); + } catch (error) { + try { + readStream.close(); + } catch (cleanupError) { + log.error( + `${logId}: Failed to clean up after error`, + Errors.toLogFormat(cleanupError) + ); + } + + throw error; + } + + readStream.close(); + + return parseContactsTransform.contacts; +} diff --git a/ts/test-electron/Crypto_test.ts b/ts/test-electron/Crypto_test.ts index f0b027ac2..44be4fd2d 100644 --- a/ts/test-electron/Crypto_test.ts +++ b/ts/test-electron/Crypto_test.ts @@ -1,8 +1,13 @@ // Copyright 2015 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import { assert } from 'chai'; +/* eslint-disable @typescript-eslint/no-non-null-assertion */ +import { assert } from 'chai'; +import { readFileSync, unlinkSync, writeFileSync } from 'fs'; +import { join } from 'path'; + +import * as log from '../logging/log'; import * as Bytes from '../Bytes'; import * as Curve from '../Curve'; import { @@ -27,7 +32,12 @@ import { hmacSha256, verifyHmacSha256, randomInt, + encryptAttachment, + decryptAttachmentV1, + padAndEncryptAttachment, } from '../Crypto'; +import { decryptAttachmentV2, encryptAttachmentV2 } from '../AttachmentCrypto'; +import { createTempDir, 
deleteTempDir } from '../updater/common'; import { uuidToBytes, bytesToUuid } from '../util/uuidToBytes'; const BUCKET_SIZES = [ @@ -586,4 +596,188 @@ describe('Crypto', () => { assert.strictEqual(count, 0, failures.join('\n')); }); }); + + describe('attachments', () => { + const FILE_PATH = join(__dirname, '../../fixtures/ghost-kitty.mp4'); + const FILE_CONTENTS = readFileSync(FILE_PATH); + let tempDir: string | undefined; + + beforeEach(async () => { + tempDir = await createTempDir(); + }); + afterEach(async () => { + if (tempDir) { + await deleteTempDir(log, tempDir); + } + }); + + it('v1 roundtrips (memory only)', () => { + const keys = getRandomBytes(64); + + // Note: support for padding is not in decryptAttachmentV1, so we don't pad here + const encryptedAttachment = encryptAttachment({ + plaintext: FILE_CONTENTS, + keys, + }); + const plaintext = decryptAttachmentV1( + encryptedAttachment.ciphertext, + keys, + encryptedAttachment.digest + ); + + assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext)); + }); + + it('v1 -> v2 (memory -> disk)', async () => { + const keys = getRandomBytes(64); + const ciphertextPath = join(tempDir!, 'file'); + let plaintextPath; + + try { + const encryptedAttachment = padAndEncryptAttachment({ + plaintext: FILE_CONTENTS, + keys, + }); + writeFileSync(ciphertextPath, encryptedAttachment.ciphertext); + + const plaintextRelativePath = await decryptAttachmentV2({ + ciphertextPath, + id: 'test', + keys, + size: FILE_CONTENTS.byteLength, + theirDigest: encryptedAttachment.digest, + }); + plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath( + plaintextRelativePath + ); + const plaintext = readFileSync(plaintextPath); + + assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext)); + } finally { + if (plaintextPath) { + unlinkSync(plaintextPath); + } + } + }); + + it('v2 roundtrips (all on disk)', async () => { + const keys = getRandomBytes(64); + let plaintextPath; + let ciphertextPath; + + try { + const encryptedAttachment = await encryptAttachmentV2({ + keys, + plaintextAbsolutePath: FILE_PATH, + size: FILE_CONTENTS.byteLength, + }); + + ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath( + encryptedAttachment.path + ); + const plaintextRelativePath = await decryptAttachmentV2({ + ciphertextPath, + id: 'test', + keys, + size: FILE_CONTENTS.byteLength, + theirDigest: encryptedAttachment.digest, + }); + plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath( + plaintextRelativePath + ); + const plaintext = readFileSync(plaintextPath); + + assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext)); + } finally { + if (plaintextPath) { + unlinkSync(plaintextPath); + } + if (ciphertextPath) { + unlinkSync(ciphertextPath); + } + } + }); + + it('v2 -> v1 (disk -> memory)', async () => { + const keys = getRandomBytes(64); + let ciphertextPath; + + try { + const encryptedAttachment = await encryptAttachmentV2({ + keys, + plaintextAbsolutePath: FILE_PATH, + size: FILE_CONTENTS.byteLength, + }); + ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath( + encryptedAttachment.path + ); + + const ciphertext = readFileSync(ciphertextPath); + + const plaintext = decryptAttachmentV1( + ciphertext, + keys, + encryptedAttachment.digest + ); + + const IV = 16; + const MAC = 32; + const PADDING_FOR_GHOST_KITTY = 126_066; // delta between file size and next bucket + assert.strictEqual( + plaintext.byteLength, + FILE_CONTENTS.byteLength + IV + MAC + PADDING_FOR_GHOST_KITTY, + 'verify padding' + ); + + // Note: support for 
padding is not in decryptAttachmentV1, so we manually unpad + const plaintextWithoutPadding = plaintext.subarray( + 0, + FILE_CONTENTS.byteLength + ); + assert.isTrue( + constantTimeEqual(FILE_CONTENTS, plaintextWithoutPadding) + ); + } finally { + if (ciphertextPath) { + unlinkSync(ciphertextPath); + } + } + }); + + it('v1 and v2 produce the same ciphertext, given same iv', async () => { + const keys = getRandomBytes(64); + let ciphertextPath; + + const dangerousTestOnlyIv = getRandomBytes(16); + + try { + const encryptedAttachmentV1 = padAndEncryptAttachment({ + plaintext: FILE_CONTENTS, + keys, + dangerousTestOnlyIv, + }); + const ciphertextV1 = encryptedAttachmentV1.ciphertext; + + const encryptedAttachmentV2 = await encryptAttachmentV2({ + keys, + plaintextAbsolutePath: FILE_PATH, + size: FILE_CONTENTS.byteLength, + dangerousTestOnlyIv, + }); + ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath( + encryptedAttachmentV2.path + ); + + const ciphertextV2 = readFileSync(ciphertextPath); + + assert.strictEqual(ciphertextV1.byteLength, ciphertextV2.byteLength); + + assert.isTrue(constantTimeEqual(ciphertextV1, ciphertextV2)); + } finally { + if (ciphertextPath) { + unlinkSync(ciphertextPath); + } + } + }); + }); }); diff --git a/ts/test-electron/util/scaleImageToLevel_test.ts b/ts/test-electron/util/scaleImageToLevel_test.ts index e06435a8a..ed803df46 100644 --- a/ts/test-electron/util/scaleImageToLevel_test.ts +++ b/ts/test-electron/util/scaleImageToLevel_test.ts @@ -35,7 +35,12 @@ describe('scaleImageToLevel', () => { testCases.map( async ({ path, contentType, expectedWidth, expectedHeight }) => { const blob = await getBlob(path); - const scaled = await scaleImageToLevel(blob, contentType, true); + const scaled = await scaleImageToLevel( + blob, + contentType, + blob.size, + true + ); const data = await loadImage(scaled.blob, { orientation: true }); const { originalWidth: width, originalHeight: height } = data; @@ -56,7 +61,7 @@ describe('scaleImageToLevel', () => { 'Test setup failure: expected fixture to have EXIF data' ); - const scaled = await scaleImageToLevel(original, IMAGE_JPEG, true); + const scaled = await scaleImageToLevel(original, IMAGE_JPEG, original.size); assert.isUndefined( (await loadImage(scaled.blob, { meta: true, orientation: true })).exif ); diff --git a/ts/test-node/types/EmbeddedContact_test.ts b/ts/test-node/types/EmbeddedContact_test.ts index 414fba248..281202a35 100644 --- a/ts/test-node/types/EmbeddedContact_test.ts +++ b/ts/test-node/types/EmbeddedContact_test.ts @@ -165,6 +165,7 @@ describe('Contact', () => { avatar: fakeAttachment({ pending: true, contentType: IMAGE_GIF, + path: undefined, }), }, }; diff --git a/ts/textsecure/ContactsParser.ts b/ts/textsecure/ContactsParser.ts index d2f323b44..14b639b9f 100644 --- a/ts/textsecure/ContactsParser.ts +++ b/ts/textsecure/ContactsParser.ts @@ -1,159 +1,233 @@ // Copyright 2020 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -/* eslint-disable max-classes-per-file */ - -import protobuf from '../protobuf/wrap'; +import { createReadStream } from 'fs'; +import { Transform } from 'stream'; +import { pipeline } from 'stream/promises'; import { SignalService as Proto } from '../protobuf'; +import protobuf from '../protobuf/wrap'; import { normalizeAci } from '../util/normalizeAci'; import { isAciString } from '../util/isAciString'; import { DurationInSeconds } from '../util/durations'; import * as Errors from '../types/errors'; import * as log from '../logging/log'; +import type { 
ContactAvatarType } from '../types/Avatar'; +import { computeHash } from '../Crypto'; +import { dropNull } from '../util/dropNull'; import Avatar = Proto.ContactDetails.IAvatar; const { Reader } = protobuf; -type OptionalFields = { avatar?: Avatar | null; expireTimer?: number | null }; - -type DecoderBase = { - decodeDelimited(reader: protobuf.Reader): Message | undefined; +type OptionalFields = { + avatar?: Avatar | null; + expireTimer?: number | null; + number?: string | null; }; -type HydratedAvatar = Avatar & { data: Uint8Array }; - type MessageWithAvatar = Omit< Message, - 'avatar' + 'avatar' | 'toJSON' > & { - avatar?: HydratedAvatar; + avatar?: ContactAvatarType; expireTimer?: DurationInSeconds; + number?: string | undefined; }; -export type ModifiedContactDetails = MessageWithAvatar; +export type ContactDetailsWithAvatar = MessageWithAvatar; -/* eslint-disable @typescript-eslint/brace-style -- Prettier conflicts with ESLint */ -abstract class ParserBase< - Message extends OptionalFields, - Decoder extends DecoderBase, - Result -> implements Iterable -{ - /* eslint-enable @typescript-eslint/brace-style */ +export async function parseContactsV2({ + absolutePath, +}: { + absolutePath: string; +}): Promise> { + const logId = 'parseContactsV2'; - protected readonly reader: protobuf.Reader; + const readStream = createReadStream(absolutePath); + const parseContactsTransform = new ParseContactsTransform(); - constructor(bytes: Uint8Array, private readonly decoder: Decoder) { - this.reader = new Reader(bytes); + try { + await pipeline(readStream, parseContactsTransform); + } catch (error) { + try { + readStream.close(); + } catch (cleanupError) { + log.error( + `${logId}: Failed to clean up after error`, + Errors.toLogFormat(cleanupError) + ); + } + + throw error; } - protected decodeDelimited(): MessageWithAvatar | undefined { - if (this.reader.pos === this.reader.len) { - return undefined; // eof + readStream.close(); + + return parseContactsTransform.contacts; +} + +// This transform pulls contacts and their avatars from a stream of bytes. This is tricky, +// because the chunk boundaries might fall in the middle of a contact or their avatar. +// So we are ready for decodeDelimited() to throw, and to keep activeContact around +// while we wait for more chunks to get to the expected avatar size. +// Note: exported only for testing +export class ParseContactsTransform extends Transform { + public contacts: Array = []; + + public activeContact: Proto.ContactDetails | undefined; + private unused: Uint8Array | undefined; + + override async _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ): Promise { + if (!chunk || chunk.byteLength === 0) { + done(); + return; } try { - const proto = this.decoder.decodeDelimited(this.reader); - - if (!proto) { - return undefined; + let data = chunk; + if (this.unused) { + data = Buffer.concat([this.unused, data]); + this.unused = undefined; } - let avatar: HydratedAvatar | undefined; - if (proto.avatar) { - const attachmentLen = proto.avatar.length ?? 
0; - const avatarData = this.reader.buf.slice( - this.reader.pos, - this.reader.pos + attachmentLen - ); - this.reader.skip(attachmentLen); + const reader = Reader.create(data); + while (reader.pos < reader.len) { + const startPos = reader.pos; - avatar = { - ...proto.avatar, + if (!this.activeContact) { + try { + this.activeContact = Proto.ContactDetails.decodeDelimited(reader); + } catch (err) { + // We get a RangeError if there wasn't enough data to read the next record. + if (err instanceof RangeError) { + // Note: A failed decodeDelimited() does in fact update reader.pos, so we + // must reset to startPos + this.unused = data.subarray(startPos); + done(); + return; + } - data: avatarData, - }; + // Something deeper has gone wrong; the proto is malformed or something + done(err); + return; + } + } + + // Something has really gone wrong if the above parsing didn't throw but gave + // us nothing back. Let's end the parse. + if (!this.activeContact) { + done(new Error('ParseContactsTransform: No active contact!')); + return; + } + + const attachmentSize = this.activeContact?.avatar?.length ?? 0; + if (attachmentSize === 0) { + // No avatar attachment for this contact + const prepared = prepareContact(this.activeContact); + if (prepared) { + this.contacts.push(prepared); + } + this.activeContact = undefined; + + continue; + } + + const spaceLeftAfterRead = reader.len - (reader.pos + attachmentSize); + if (spaceLeftAfterRead >= 0) { + // We've read enough data to read the entire attachment + const avatarData = reader.buf.slice( + reader.pos, + reader.pos + attachmentSize + ); + const hash = computeHash(data); + + // eslint-disable-next-line no-await-in-loop + const path = await window.Signal.Migrations.writeNewAttachmentData( + avatarData + ); + + const prepared = prepareContact(this.activeContact, { + ...this.activeContact.avatar, + hash, + path, + }); + if (prepared) { + this.contacts.push(prepared); + } else { + // eslint-disable-next-line no-await-in-loop + await window.Signal.Migrations.deleteAttachmentData(path); + } + this.activeContact = undefined; + + reader.skip(attachmentSize); + } else { + // We have an attachment, but we haven't read enough data yet. We need to + // wait for another chunk. + this.unused = data.subarray(reader.pos); + done(); + return; + } } - let expireTimer: DurationInSeconds | undefined; - - if (proto.expireTimer != null) { - expireTimer = DurationInSeconds.fromSeconds(proto.expireTimer); - } - - return { - ...proto, - - avatar, - expireTimer, - }; + // No need to push; no downstream consumers! } catch (error) { - log.error('ProtoParser.next error:', Errors.toLogFormat(error)); - return undefined; + done(error); + return; } - } - public abstract next(): Result | undefined; - - *[Symbol.iterator](): Iterator { - let result = this.next(); - while (result !== undefined) { - yield result; - result = this.next(); - } + done(); } } -export class ContactBuffer extends ParserBase< - Proto.ContactDetails, - typeof Proto.ContactDetails, - ModifiedContactDetails -> { - constructor(arrayBuffer: Uint8Array) { - super(arrayBuffer, Proto.ContactDetails); - } +function prepareContact( + proto: Proto.ContactDetails, + avatar?: ContactAvatarType +): ContactDetailsWithAvatar | undefined { + const aci = proto.aci + ? 
normalizeAci(proto.aci, 'ContactBuffer.aci') + : proto.aci; - public override next(): ModifiedContactDetails | undefined { - while (this.reader.pos < this.reader.len) { - const proto = this.decodeDelimited(); - if (!proto) { - return undefined; - } + const expireTimer = + proto.expireTimer != null + ? DurationInSeconds.fromSeconds(proto.expireTimer) + : undefined; - if (!proto.aci) { - return proto; - } + const verified = + proto.verified && proto.verified.destinationAci + ? { + ...proto.verified, - const { verified } = proto; + destinationAci: normalizeAci( + proto.verified.destinationAci, + 'ContactBuffer.verified.destinationAci' + ), + } + : proto.verified; - if ( - !isAciString(proto.aci) || - (verified?.destinationAci && !isAciString(verified.destinationAci)) - ) { - continue; - } - - return { - ...proto, - - verified: - verified && verified.destinationAci - ? { - ...verified, - - destinationAci: normalizeAci( - verified.destinationAci, - 'ContactBuffer.verified.destinationAci' - ), - } - : verified, - - aci: normalizeAci(proto.aci, 'ContactBuffer.aci'), - }; - } + // We reject incoming contacts with invalid aci information + if ( + (proto.aci && !isAciString(proto.aci)) || + (proto.verified?.destinationAci && + !isAciString(proto.verified.destinationAci)) + ) { + log.warn('ParseContactsTransform: Dropping contact with invalid aci'); return undefined; } + + const result = { + ...proto, + expireTimer, + aci, + verified, + avatar, + number: dropNull(proto.number), + }; + + return result; } diff --git a/ts/textsecure/MessageReceiver.ts b/ts/textsecure/MessageReceiver.ts index c2e1425ce..8603427ae 100644 --- a/ts/textsecure/MessageReceiver.ts +++ b/ts/textsecure/MessageReceiver.ts @@ -6,6 +6,8 @@ import { isBoolean, isNumber, isString, omit } from 'lodash'; import PQueue from 'p-queue'; import { v4 as getGuid } from 'uuid'; +import { existsSync } from 'fs'; +import { removeSync } from 'fs-extra'; import type { SealedSenderDecryptionResult, @@ -49,7 +51,7 @@ import { parseIntOrThrow } from '../util/parseIntOrThrow'; import { clearTimeoutIfNecessary } from '../util/clearTimeoutIfNecessary'; import { Zone } from '../util/Zone'; import { DurationInSeconds, SECOND } from '../util/durations'; -import type { DownloadedAttachmentType } from '../types/Attachment'; +import type { AttachmentType } from '../types/Attachment'; import { Address } from '../types/Address'; import { QualifiedAddress } from '../types/QualifiedAddress'; import { normalizeStoryDistributionId } from '../types/StoryDistributionId'; @@ -81,9 +83,10 @@ import { import { processSyncMessage } from './processSyncMessage'; import type { EventHandler } from './EventTarget'; import EventTarget from './EventTarget'; -import { downloadAttachment } from './downloadAttachment'; +import { downloadAttachmentV2 } from './downloadAttachment'; import type { IncomingWebSocketRequest } from './WebsocketResources'; -import { ContactBuffer } from './ContactsParser'; +import type { ContactDetailsWithAvatar } from './ContactsParser'; +import { parseContactsV2 } from './ContactsParser'; import type { WebAPIType } from './WebAPI'; import type { Storage } from './Storage'; import { WarnOnlyError } from './Errors'; @@ -3504,11 +3507,11 @@ export default class MessageReceiver private async handleContacts( envelope: ProcessedEnvelope, - contacts: Proto.SyncMessage.IContacts + contactSyncProto: Proto.SyncMessage.IContacts ): Promise { const logId = getEnvelopeId(envelope); log.info(`MessageReceiver: handleContacts ${logId}`); - const { blob } = contacts; + 
const { blob } = contactSyncProto; if (!blob) { throw new Error('MessageReceiver.handleContacts: blob field was missing'); } @@ -3517,21 +3520,50 @@ export default class MessageReceiver this.removeFromCache(envelope); - const attachmentPointer = await this.handleAttachment(blob, { - disableRetries: true, - timeout: 90 * SECOND, - }); - const contactBuffer = new ContactBuffer(attachmentPointer.data); + let attachment: AttachmentType | undefined; + try { + attachment = await this.handleAttachmentV2(blob, { + disableRetries: true, + timeout: 90 * SECOND, + }); - const contactSync = new ContactSyncEvent( - Array.from(contactBuffer), - Boolean(contacts.complete), - envelope.receivedAtCounter, - envelope.timestamp - ); - await this.dispatchAndWait(logId, contactSync); + const { path } = attachment; + if (!path) { + throw new Error('Failed no path field in returned attachment'); + } + const absolutePath = + window.Signal.Migrations.getAbsoluteAttachmentPath(path); + if (!existsSync(absolutePath)) { + throw new Error( + 'Contact sync attachment had path, but it was not found on disk' + ); + } - log.info('handleContacts: finished'); + let contacts: ReadonlyArray; + try { + contacts = await parseContactsV2({ + absolutePath, + }); + } finally { + if (absolutePath) { + removeSync(absolutePath); + } + } + + const contactSync = new ContactSyncEvent( + contacts, + Boolean(contactSyncProto.complete), + envelope.receivedAtCounter, + envelope.timestamp + ); + await this.dispatchAndWait(logId, contactSync); + + log.info('handleContacts: finished'); + } finally { + if (attachment?.path) { + await window.Signal.Migrations.deleteAttachmentData(attachment.path); + } + } } private async handleBlocked( @@ -3618,12 +3650,12 @@ export default class MessageReceiver return this.storage.blocked.isGroupBlocked(groupId); } - private async handleAttachment( + private async handleAttachmentV2( attachment: Proto.IAttachmentPointer, options?: { timeout?: number; disableRetries?: boolean } - ): Promise { + ): Promise { const cleaned = processAttachment(attachment); - return downloadAttachment(this.server, cleaned, options); + return downloadAttachmentV2(this.server, cleaned, options); } private async handleEndSession( diff --git a/ts/textsecure/WebAPI.ts b/ts/textsecure/WebAPI.ts index 7c640617a..45d10726b 100644 --- a/ts/textsecure/WebAPI.ts +++ b/ts/textsecure/WebAPI.ts @@ -22,7 +22,10 @@ import * as durations from '../util/durations'; import type { ExplodePromiseResultType } from '../util/explodePromise'; import { explodePromise } from '../util/explodePromise'; import { getUserAgent } from '../util/getUserAgent'; -import { getStreamWithTimeout } from '../util/getStreamWithTimeout'; +import { + getTimeoutStream, + getStreamWithTimeout, +} from '../util/getStreamWithTimeout'; import { formatAcceptLanguageHeader } from '../util/userLanguages'; import { toWebSafeBase64, fromWebSafeBase64 } from '../util/webSafeBase64'; import { getBasicAuth } from '../util/getBasicAuth'; @@ -970,6 +973,14 @@ export type WebAPIType = { timeout?: number; } ) => Promise; + getAttachmentV2: ( + cdnKey: string, + cdnNumber?: number, + options?: { + disableRetries?: boolean; + timeout?: number; + } + ) => Promise; getAvatar: (path: string) => Promise; getHasSubscription: (subscriberId: Uint8Array) => Promise; getGroup: (options: GroupCredentialsType) => Promise; @@ -1386,6 +1397,7 @@ export function initialize({ getArtAuth, getArtProvisioningSocket, getAttachment, + getAttachmentV2, getAvatar, getBadgeImageFile, getConfig, @@ -2876,6 +2888,61 @@ 
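Note on the WebAPI hunk below: getAttachmentV2 chains the CDN download into a timeout stream with .pipe() and forwards errors by hand, because Node's .pipe() does not propagate 'error' events to the destination stream. A minimal illustration of that behavior follows; the stream names here are illustrative and not taken from this patch.

import { PassThrough } from 'stream';

// `source` and `sink` stand in for the download stream and the timeout stream.
const source = new PassThrough();
const sink = new PassThrough();

sink.on('error', error => console.log('consumer saw:', error.message));

// pipe() moves data but does not forward 'error' events, so failures must be
// re-emitted on the destination by hand, exactly as the code below does.
source.on('error', error => sink.emit('error', error)).pipe(sink);

source.emit('error', new Error('network hiccup')); // logs: consumer saw: network hiccup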
export function initialize({ } } + async function getAttachmentV2( + cdnKey: string, + cdnNumber?: number, + options?: { + disableRetries?: boolean; + timeout?: number; + } + ): Promise { + const abortController = new AbortController(); + + const cdnUrl = isNumber(cdnNumber) + ? cdnUrlObject[cdnNumber] ?? cdnUrlObject['0'] + : cdnUrlObject['0']; + // This is going to the CDN, not the service, so we use _outerAjax + const downloadStream = await _outerAjax( + `${cdnUrl}/attachments/${cdnKey}`, + { + certificateAuthority, + disableRetries: options?.disableRetries, + proxyUrl, + responseType: 'stream', + timeout: options?.timeout || 0, + type: 'GET', + redactUrl: _createRedactor(cdnKey), + version, + abortSignal: abortController.signal, + } + ); + + const timeoutStream = getTimeoutStream({ + name: `getAttachment(${cdnKey})`, + timeout: GET_ATTACHMENT_CHUNK_TIMEOUT, + abortController, + }); + + const combinedStream = downloadStream + // We do this manually; pipe() doesn't flow errors through the streams for us + .on('error', (error: Error) => { + timeoutStream.emit('error', error); + }) + .pipe(timeoutStream); + + const cancelRequest = (error: Error) => { + combinedStream.emit('error', error); + abortController.abort(); + }; + registerInflightRequest(cancelRequest); + + combinedStream.on('done', () => { + unregisterInFlightRequest(cancelRequest); + }); + + return combinedStream; + } + async function putEncryptedAttachment(encryptedBin: Uint8Array) { const response = attachmentV3Response.parse( await _ajax({ diff --git a/ts/textsecure/authorizeArtCreator.ts b/ts/textsecure/authorizeArtCreator.ts index 9fe3759a5..167575e77 100644 --- a/ts/textsecure/authorizeArtCreator.ts +++ b/ts/textsecure/authorizeArtCreator.ts @@ -37,12 +37,12 @@ export async function authorizeArtCreator({ ); const keys = Bytes.concatenate([aesKey, macKey]); - const { ciphertext } = encryptAttachment( - Proto.ArtProvisioningMessage.encode({ + const { ciphertext } = encryptAttachment({ + plaintext: Proto.ArtProvisioningMessage.encode({ ...auth, }).finish(), - keys - ); + keys, + }); const envelope = Proto.ArtProvisioningEnvelope.encode({ publicKey: ourKeys.pubKey, diff --git a/ts/textsecure/downloadAttachment.ts b/ts/textsecure/downloadAttachment.ts index 4587d4b82..f961b8496 100644 --- a/ts/textsecure/downloadAttachment.ts +++ b/ts/textsecure/downloadAttachment.ts @@ -1,19 +1,40 @@ // Copyright 2020 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import { isNumber } from 'lodash'; +import { createWriteStream, existsSync, unlinkSync } from 'fs'; +import { isNumber, omit } from 'lodash'; +import type { Readable } from 'stream'; +import { Transform } from 'stream'; +import { pipeline } from 'stream/promises'; +import { ensureFile } from 'fs-extra'; +import * as log from '../logging/log'; +import * as Errors from '../types/errors'; import { strictAssert } from '../util/assert'; import { dropNull } from '../util/dropNull'; -import type { DownloadedAttachmentType } from '../types/Attachment'; +import { + AttachmentSizeError, + type AttachmentType, + type DownloadedAttachmentType, +} from '../types/Attachment'; import * as MIME from '../types/MIME'; import * as Bytes from '../Bytes'; -import { getFirstBytes, decryptAttachment } from '../Crypto'; +import { + getFirstBytes, + decryptAttachmentV1, + getAttachmentSizeBucket, +} from '../Crypto'; +import { + decryptAttachmentV2, + IV_LENGTH, + ATTACHMENT_MAC_LENGTH, +} from '../AttachmentCrypto'; import type { ProcessedAttachment } from './Types.d'; import type { 
WebAPIType } from './WebAPI'; +import { createName, getRelativePath } from '../windows/attachments'; -export async function downloadAttachment( +export async function downloadAttachmentV1( server: WebAPIType, attachment: ProcessedAttachment, options?: { @@ -28,7 +49,6 @@ export async function downloadAttachment( throw new Error('downloadAttachment: Attachment was missing cdnId!'); } - strictAssert(cdnId, 'attachment without cdnId'); const encrypted = await server.getAttachment( cdnId, dropNull(cdnNumber), @@ -41,9 +61,8 @@ export async function downloadAttachment( } strictAssert(key, 'attachment has no key'); - strictAssert(digest, 'attachment has no digest'); - const paddedData = decryptAttachment( + const paddedData = decryptAttachmentV1( encrypted, Bytes.fromBase64(key), Bytes.fromBase64(digest) @@ -67,3 +86,132 @@ export async function downloadAttachment( data, }; } + +export async function downloadAttachmentV2( + server: WebAPIType, + attachment: ProcessedAttachment, + options?: { + disableRetries?: boolean; + timeout?: number; + } +): Promise { + const { cdnId, cdnKey, cdnNumber, contentType, digest, key, size } = + attachment; + + const cdn = cdnId || cdnKey; + const logId = `downloadAttachmentV2(${cdn}):`; + + strictAssert(cdn, `${logId}: missing cdnId or cdnKey`); + strictAssert(digest, `${logId}: missing digest`); + strictAssert(key, `${logId}: missing key`); + strictAssert(isNumber(size), `${logId}: missing size`); + + const downloadStream = await server.getAttachmentV2( + cdn, + dropNull(cdnNumber), + options + ); + + const cipherTextRelativePath = await downloadToDisk({ downloadStream, size }); + const cipherTextAbsolutePath = + window.Signal.Migrations.getAbsoluteAttachmentPath(cipherTextRelativePath); + + const relativePath = await decryptAttachmentV2({ + ciphertextPath: cipherTextAbsolutePath, + id: cdn, + keys: Bytes.fromBase64(key), + size, + theirDigest: Bytes.fromBase64(digest), + }); + + if (existsSync(cipherTextAbsolutePath)) { + unlinkSync(cipherTextAbsolutePath); + } + + return { + ...omit(attachment, 'key'), + path: relativePath, + size, + contentType: contentType + ? MIME.stringToMIMEType(contentType) + : MIME.APPLICATION_OCTET_STREAM, + }; +} + +async function downloadToDisk({ + downloadStream, + size, +}: { + downloadStream: Readable; + size: number; +}): Promise { + const relativeTargetPath = getRelativePath(createName()); + const absoluteTargetPath = + window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath); + await ensureFile(absoluteTargetPath); + const writeStream = createWriteStream(absoluteTargetPath); + + const targetSize = + getAttachmentSizeBucket(size) * 1.05 + IV_LENGTH + ATTACHMENT_MAC_LENGTH; + const checkSizeTransform = new CheckSizeTransform(targetSize); + + try { + await pipeline(downloadStream, checkSizeTransform, writeStream); + } catch (error) { + try { + writeStream.close(); + if (absoluteTargetPath && existsSync(absoluteTargetPath)) { + unlinkSync(absoluteTargetPath); + } + } catch (cleanupError) { + log.error( + 'downloadToDisk: Error while cleaning up', + Errors.toLogFormat(cleanupError) + ); + } + + throw error; + } + + return relativeTargetPath; +} + +// A simple transform that throws if it sees more than maxBytes on the stream. 
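For reference, a minimal sketch of how such a size guard composes with the budget computed in downloadToDisk above. The bucket parameter stands in for getAttachmentSizeBucket, byteCeiling and saveWithCeiling are illustrative names rather than part of this patch, and the 16-byte IV / 32-byte MAC sizes are the ones used by the Crypto tests earlier in this change.

import type { Readable } from 'stream';
import { Transform } from 'stream';
import { pipeline } from 'stream/promises';
import { createWriteStream } from 'fs';

const IV_LENGTH = 16; // mirrors IV_LENGTH from AttachmentCrypto
const MAC_LENGTH = 32; // mirrors ATTACHMENT_MAC_LENGTH

// Fails the pipeline as soon as more than maxBytes have flowed through.
function byteCeiling(maxBytes: number): Transform {
  let seen = 0;
  return new Transform({
    transform(chunk: Buffer, _encoding, done) {
      seen += chunk.byteLength;
      if (seen > maxBytes) {
        done(new Error(`Stream exceeded ${maxBytes} bytes`));
        return;
      }
      done(null, chunk);
    },
  });
}

async function saveWithCeiling(
  source: Readable,
  plaintextSize: number,
  bucket: (size: number) => number, // stand-in for getAttachmentSizeBucket
  targetPath: string
): Promise<void> {
  // Same budget as downloadToDisk: the bucketed plaintext size plus 5% headroom,
  // the IV prefix, and the trailing MAC.
  const maxBytes = bucket(plaintextSize) * 1.05 + IV_LENGTH + MAC_LENGTH;
  await pipeline(source, byteCeiling(maxBytes), createWriteStream(targetPath));
}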
+class CheckSizeTransform extends Transform { + private bytesSeen = 0; + + constructor(private maxBytes: number) { + super(); + } + + override _transform( + chunk: Buffer | undefined, + _encoding: string, + done: (error?: Error) => void + ) { + if (!chunk || chunk.byteLength === 0) { + done(); + return; + } + + try { + this.bytesSeen += chunk.byteLength; + + if (this.bytesSeen > this.maxBytes) { + done( + new AttachmentSizeError( + `CheckSizeTransform: Saw ${this.bytesSeen} bytes, max is ${this.maxBytes} bytes` + ) + ); + return; + } + + this.push(chunk); + } catch (error) { + done(error); + return; + } + + done(); + } +} diff --git a/ts/textsecure/index.ts b/ts/textsecure/index.ts index a625ca591..1fc16c9f5 100644 --- a/ts/textsecure/index.ts +++ b/ts/textsecure/index.ts @@ -5,7 +5,6 @@ import EventTarget from './EventTarget'; import AccountManager from './AccountManager'; import MessageReceiver from './MessageReceiver'; import utils from './Helpers'; -import { ContactBuffer } from './ContactsParser'; import SyncRequest from './SyncRequest'; import MessageSender from './SendMessage'; import { Storage } from './Storage'; @@ -17,7 +16,6 @@ export type TextSecureType = { storage: Storage; AccountManager: typeof AccountManager; - ContactBuffer: typeof ContactBuffer; EventTarget: typeof EventTarget; MessageReceiver: typeof MessageReceiver; MessageSender: typeof MessageSender; @@ -34,7 +32,6 @@ export const textsecure: TextSecureType = { storage: new Storage(), AccountManager, - ContactBuffer, EventTarget, MessageReceiver, MessageSender, diff --git a/ts/textsecure/messageReceiverEvents.ts b/ts/textsecure/messageReceiverEvents.ts index c06a9960c..1f96b1a6a 100644 --- a/ts/textsecure/messageReceiverEvents.ts +++ b/ts/textsecure/messageReceiverEvents.ts @@ -12,7 +12,7 @@ import type { ProcessedDataMessage, ProcessedSent, } from './Types.d'; -import type { ModifiedContactDetails } from './ContactsParser'; +import type { ContactDetailsWithAvatar } from './ContactsParser'; import type { CallEventDetails, CallLogEvent } from '../types/CallDisposition'; export class EmptyEvent extends Event { @@ -74,7 +74,7 @@ export class ErrorEvent extends Event { export class ContactSyncEvent extends Event { constructor( - public readonly contacts: ReadonlyArray, + public readonly contacts: ReadonlyArray, public readonly complete: boolean, public readonly receivedAtCounter: number, public readonly sentAt: number diff --git a/ts/types/Attachment.ts b/ts/types/Attachment.ts index c95ae4546..7dbbb0c7e 100644 --- a/ts/types/Attachment.ts +++ b/ts/types/Attachment.ts @@ -37,6 +37,8 @@ const MIN_HEIGHT = 50; // Used for display +export class AttachmentSizeError extends Error {} + export type AttachmentType = { error?: boolean; blurHash?: string; @@ -75,6 +77,7 @@ export type AttachmentType = { key?: string; data?: Uint8Array; textAttachment?: TextAttachmentType; + wasTooBig?: boolean; /** Legacy field. 
Used only for downloading old attachments */ id?: number; @@ -1008,9 +1011,9 @@ export const defaultBlurHash = (theme: ThemeType = ThemeType.light): string => { }; export const canBeDownloaded = ( - attachment: Pick + attachment: Pick ): boolean => { - return Boolean(attachment.key && attachment.digest); + return Boolean(attachment.digest && attachment.key && !attachment.wasTooBig); }; export function getAttachmentSignature(attachment: AttachmentType): string { diff --git a/ts/types/AttachmentSize.ts b/ts/types/AttachmentSize.ts index 6511c2104..23ea7b245 100644 --- a/ts/types/AttachmentSize.ts +++ b/ts/types/AttachmentSize.ts @@ -9,14 +9,14 @@ export const KIBIBYTE = 1024; const MEBIBYTE = 1024 * 1024; const DEFAULT_MAX = 100 * MEBIBYTE; -export const getMaximumAttachmentSizeInKb = ( +export const getMaximumOutgoingAttachmentSizeInKb = ( getValue: typeof RemoteConfig.getValue ): number => { try { return ( parseIntOrThrow( getValue('global.attachments.maxBytes'), - 'preProcessAttachment/maxAttachmentSize' + 'getMaximumOutgoingAttachmentSizeInKb' ) / KIBIBYTE ); } catch (error) { @@ -27,6 +27,22 @@ export const getMaximumAttachmentSizeInKb = ( } }; +export const getMaximumIncomingAttachmentSizeInKb = ( + getValue: typeof RemoteConfig.getValue +): number => { + try { + return ( + parseIntOrThrow( + getValue('global.attachments.maxReceiveBytes'), + 'getMaximumIncomingAttachmentSizeInKb' + ) / KIBIBYTE + ); + } catch (_error) { + // TODO: DESKTOP-5913. We're not gonna log until the new flag is fully deployed + return getMaximumOutgoingAttachmentSizeInKb(getValue) * 1.25; + } +}; + export function getRenderDetailsForLimit(limitKb: number): { limit: number; units: string; diff --git a/ts/types/Avatar.ts b/ts/types/Avatar.ts index c6bd46d5d..974e9fc56 100644 --- a/ts/types/Avatar.ts +++ b/ts/types/Avatar.ts @@ -34,6 +34,12 @@ export const GroupAvatarIcons = [ 'surfboard', ] as const; +export type ContactAvatarType = { + path: string; + url?: string; + hash?: string; +}; + type GroupAvatarIconType = typeof GroupAvatarIcons[number]; type PersonalAvatarIconType = typeof PersonalAvatarIcons[number]; diff --git a/ts/types/Conversation.ts b/ts/types/Conversation.ts index 578bb1307..5aeb38722 100644 --- a/ts/types/Conversation.ts +++ b/ts/types/Conversation.ts @@ -2,33 +2,87 @@ // SPDX-License-Identifier: AGPL-3.0-only import type { ConversationAttributesType } from '../model-types.d'; +import type { ContactAvatarType } from './Avatar'; import { computeHash } from '../Crypto'; export type BuildAvatarUpdaterOptions = Readonly<{ + data?: Uint8Array; + newAvatar?: ContactAvatarType; deleteAttachmentData: (path: string) => Promise; doesAttachmentExist: (path: string) => Promise; writeNewAttachmentData: (data: Uint8Array) => Promise; }>; +// This function is ready to handle raw avatar data as well as an avatar which has +// already been downloaded to disk. +// Scenarios that go to disk today: +// - During a contact sync (see ContactsParser.ts) +// Scenarios that stay in memory today: +// - models/Conversations/setProfileAvatar function buildAvatarUpdater({ field }: { field: 'avatar' | 'profileAvatar' }) { return async ( conversation: Readonly, - data: Uint8Array, { + data, + newAvatar, deleteAttachmentData, doesAttachmentExist, writeNewAttachmentData, }: BuildAvatarUpdaterOptions ): Promise => { - if (!conversation) { + if (!conversation || (!data && !newAvatar)) { return conversation; } - const avatar = conversation[field]; + const oldAvatar = conversation[field]; + const newHash = data ? 
computeHash(data) : undefined; - const newHash = computeHash(data); + if (!oldAvatar || !oldAvatar.hash) { + if (newAvatar) { + return { + ...conversation, + [field]: newAvatar, + }; + } + if (data) { + return { + ...conversation, + [field]: { + hash: newHash, + path: await writeNewAttachmentData(data), + }, + }; + } + throw new Error('buildAvatarUpdater: neither newAvatar or newData'); + } - if (!avatar || !avatar.hash) { + const { hash, path } = oldAvatar; + const exists = await doesAttachmentExist(path); + if (!exists) { + window.SignalContext.log.warn( + `Conversation.buildAvatarUpdater: attachment ${path} did not exist` + ); + } + + if (exists) { + if (newAvatar && hash && hash === newAvatar.hash) { + await deleteAttachmentData(newAvatar.path); + return conversation; + } + if (data && hash && hash === newHash) { + return conversation; + } + } + + await deleteAttachmentData(path); + + if (newAvatar) { + return { + ...conversation, + [field]: newAvatar, + }; + } + if (data) { return { ...conversation, [field]: { @@ -38,27 +92,7 @@ function buildAvatarUpdater({ field }: { field: 'avatar' | 'profileAvatar' }) { }; } - const { hash, path } = avatar; - const exists = await doesAttachmentExist(path); - if (!exists) { - window.SignalContext.log.warn( - `Conversation.buildAvatarUpdater: attachment ${path} did not exist` - ); - } - - if (exists && hash === newHash) { - return conversation; - } - - await deleteAttachmentData(path); - - return { - ...conversation, - [field]: { - hash: newHash, - path: await writeNewAttachmentData(data), - }, - }; + throw new Error('buildAvatarUpdater: neither newAvatar or newData'); }; } diff --git a/ts/types/Message2.ts b/ts/types/Message2.ts index 34cc76dbd..4b5d36f73 100644 --- a/ts/types/Message2.ts +++ b/ts/types/Message2.ts @@ -593,10 +593,18 @@ export const processNewAttachment = async ( isIncoming: true, } ); - const onDiskAttachment = await migrateDataToFileSystem(rotatedAttachment, { - writeNewAttachmentData, - logger, - }); + + let onDiskAttachment = rotatedAttachment; + + // If we rotated the attachment, then `data` will be the actual bytes of the attachment, + // in memory. We want that updated attachment to go back to disk. + if (rotatedAttachment.data) { + onDiskAttachment = await migrateDataToFileSystem(rotatedAttachment, { + writeNewAttachmentData, + logger, + }); + } + const finalAttachment = await captureDimensionsAndScreenshot( onDiskAttachment, { diff --git a/ts/types/Stickers.ts b/ts/types/Stickers.ts index 1b017797f..d8a99801b 100644 --- a/ts/types/Stickers.ts +++ b/ts/types/Stickers.ts @@ -11,7 +11,7 @@ import { makeLookup } from '../util/makeLookup'; import { maybeParseUrl } from '../util/url'; import * as Bytes from '../Bytes'; import * as Errors from './errors'; -import { deriveStickerPackKey, decryptAttachment } from '../Crypto'; +import { deriveStickerPackKey, decryptAttachmentV1 } from '../Crypto'; import { IMAGE_WEBP } from './MIME'; import type { MIMEType } from './MIME'; import { sniffImageMimeType } from '../util/sniffImageMimeType'; @@ -310,7 +310,10 @@ function getReduxStickerActions() { function decryptSticker(packKey: string, ciphertext: Uint8Array): Uint8Array { const binaryKey = Bytes.fromBase64(packKey); const derivedKey = deriveStickerPackKey(binaryKey); - const plaintext = decryptAttachment(ciphertext, derivedKey); + + // Note this download and decrypt in memory is okay because these files are maximum + // 300kb, enforced by the server. 
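For background on decryptAttachmentV1 (used here and in the Crypto tests above), a sketch of the envelope those helpers assume: a 64-byte key split into a 32-byte AES key and a 32-byte MAC key, a 16-byte IV prefix, and a 32-byte MAC trailer, matching the IV/MAC constants used in the tests. The AES-256-CBC plus HMAC-SHA256 choice is an assumption for illustration; the shipped helpers live in ts/Crypto.ts and additionally verify the attachment digest passed in as the third argument.

import { createDecipheriv, createHmac, timingSafeEqual } from 'crypto';

function decryptV1Sketch(envelope: Buffer, keys: Buffer): Buffer {
  if (keys.length !== 64 || envelope.length < 16 + 32) {
    throw new Error('decryptV1Sketch: bad input sizes');
  }

  const aesKey = keys.subarray(0, 32);
  const macKey = keys.subarray(32, 64);

  const iv = envelope.subarray(0, 16);
  const body = envelope.subarray(16, envelope.length - 32);
  const theirMac = envelope.subarray(envelope.length - 32);

  // The MAC covers everything before it: iv || ciphertext.
  const ourMac = createHmac('sha256', macKey)
    .update(envelope.subarray(0, envelope.length - 32))
    .digest();
  if (!timingSafeEqual(ourMac, theirMac)) {
    throw new Error('decryptV1Sketch: MAC mismatch');
  }

  const decipher = createDecipheriv('aes-256-cbc', aesKey, iv);
  return Buffer.concat([decipher.update(body), decipher.final()]);
}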
+ const plaintext = decryptAttachmentV1(ciphertext, derivedKey); return plaintext; } diff --git a/ts/util/attachments.ts b/ts/util/attachments.ts index cb65fc6ea..bad7fbd8d 100644 --- a/ts/util/attachments.ts +++ b/ts/util/attachments.ts @@ -42,17 +42,28 @@ export async function autoOrientJPEG( // already been scaled to level, oriented, stripped of exif data, and saved // in high quality format. If we want to send the image in HQ we can return // the attachment as-is. Otherwise we'll have to further scale it down. - if (!attachment.data || sendHQImages) { + const { data, path, size } = attachment; + + if (sendHQImages) { return attachment; } + let scaleTarget: string | Blob; + if (path) { + scaleTarget = window.Signal.Migrations.getAbsoluteAttachmentPath(path); + } else { + if (!data) { + return attachment; + } + scaleTarget = new Blob([data], { + type: attachment.contentType, + }); + } - const dataBlob = new Blob([attachment.data], { - type: attachment.contentType, - }); try { const { blob: xcodedDataBlob } = await scaleImageToLevel( - dataBlob, + scaleTarget, attachment.contentType, + size, isIncoming ); const xcodedDataArrayBuffer = await blobToArrayBuffer(xcodedDataBlob); diff --git a/ts/util/downloadAttachment.ts b/ts/util/downloadAttachment.ts index 7275018b7..0be52b948 100644 --- a/ts/util/downloadAttachment.ts +++ b/ts/util/downloadAttachment.ts @@ -1,15 +1,12 @@ // Copyright 2020 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { - AttachmentType, - DownloadedAttachmentType, -} from '../types/Attachment'; -import { downloadAttachment as doDownloadAttachment } from '../textsecure/downloadAttachment'; +import type { AttachmentType } from '../types/Attachment'; +import { downloadAttachmentV2 as doDownloadAttachment } from '../textsecure/downloadAttachment'; export async function downloadAttachment( attachmentData: AttachmentType -): Promise { +): Promise { let migratedAttachment: AttachmentType; const { server } = window.textsecure; diff --git a/ts/util/getNotificationDataForMessage.ts b/ts/util/getNotificationDataForMessage.ts index c82c02364..58fd7484f 100644 --- a/ts/util/getNotificationDataForMessage.ts +++ b/ts/util/getNotificationDataForMessage.ts @@ -297,6 +297,16 @@ export function getNotificationDataForMessage( const attachment = attachments[0] || {}; const { contentType } = attachment; + const tooBigAttachmentCount = attachments.filter( + item => item.wasTooBig + ).length; + if (tooBigAttachmentCount === attachments.length) { + return { + emoji: '📎', + text: window.i18n('icu:message--attachmentTooBig--one'), + }; + } + if (contentType === MIME.IMAGE_GIF || Attachment.isGIF(attachments)) { return { bodyRanges, diff --git a/ts/util/getStreamWithTimeout.ts b/ts/util/getStreamWithTimeout.ts index c9f14c0bb..2bc88426c 100644 --- a/ts/util/getStreamWithTimeout.ts +++ b/ts/util/getStreamWithTimeout.ts @@ -1,6 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only +import { Transform } from 'stream'; import type { Readable } from 'stream'; import * as Bytes from '../Bytes'; @@ -59,3 +60,46 @@ export function getStreamWithTimeout( return promise; } + +export function getTimeoutStream({ + name, + timeout, + abortController, +}: OptionsType): Transform { + const timeoutStream = new Transform(); + + let timer: NodeJS.Timeout | undefined; + const clearTimer = () => { + clearTimeoutIfNecessary(timer); + timer = undefined; + }; + + const reset = () => { + clearTimer(); + + timer = setTimeout(() => { + abortController.abort(); 
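// Note: abort() tears down the caller's in-flight request first (getAttachmentV2
// passes this controller's signal to the network layer), and the 'error' emitted
// just below then fails anything piped off the timeout stream.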
+ timeoutStream.emit( + 'error', + new StreamTimeoutError(`getStreamWithTimeout(${name}) timed out`) + ); + clearTimer(); + }, timeout); + }; + + timeoutStream._transform = function transform(chunk, _encoding, done) { + try { + reset(); + } catch (error) { + return done(error); + } + + this.push(chunk); + + done(); + }; + + reset(); + + return timeoutStream; +} diff --git a/ts/util/handleImageAttachment.ts b/ts/util/handleImageAttachment.ts index 09fe1422c..67f7cad6c 100644 --- a/ts/util/handleImageAttachment.ts +++ b/ts/util/handleImageAttachment.ts @@ -82,6 +82,7 @@ export async function autoScale({ const { blob, contentType: newContentType } = await scaleImageToLevel( file, contentType, + file.size, true ); diff --git a/ts/util/processAttachment.ts b/ts/util/processAttachment.ts index 4e86fa8cd..016d68a20 100644 --- a/ts/util/processAttachment.ts +++ b/ts/util/processAttachment.ts @@ -7,7 +7,7 @@ import type { InMemoryAttachmentDraftType, } from '../types/Attachment'; import { - getMaximumAttachmentSizeInKb, + getMaximumOutgoingAttachmentSizeInKb, getRenderDetailsForLimit, KIBIBYTE, } from '../types/AttachmentSize'; @@ -75,7 +75,7 @@ export async function processAttachment( } function isAttachmentSizeOkay(attachment: Readonly): boolean { - const limitKb = getMaximumAttachmentSizeInKb(getRemoteConfigValue); + const limitKb = getMaximumOutgoingAttachmentSizeInKb(getRemoteConfigValue); // this needs to be cast properly // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore diff --git a/ts/util/scaleImageToLevel.ts b/ts/util/scaleImageToLevel.ts index a0b632015..482d076f7 100644 --- a/ts/util/scaleImageToLevel.ts +++ b/ts/util/scaleImageToLevel.ts @@ -109,8 +109,9 @@ async function getCanvasBlobAsJPEG( } export async function scaleImageToLevel( - fileOrBlobOrURL: File | Blob, + fileOrBlobOrURL: File | Blob | string, contentType: MIMEType, + size: number, sendAsHighQuality?: boolean ): Promise<{ blob: Blob; @@ -136,10 +137,14 @@ export async function scaleImageToLevel( const level = sendAsHighQuality ? 
MediaQualityLevels.Three : getMediaQualityLevel(); - const { maxDimensions, quality, size, thresholdSize } = - MEDIA_QUALITY_LEVEL_DATA.get(level) || DEFAULT_LEVEL_DATA; + const { + maxDimensions, + quality, + size: targetSize, + thresholdSize, + } = MEDIA_QUALITY_LEVEL_DATA.get(level) || DEFAULT_LEVEL_DATA; - if (fileOrBlobOrURL.size <= thresholdSize) { + if (size <= thresholdSize) { // Always encode through canvas as a temporary fix for a library bug const blob: Blob = await canvasToBlob(data.image, contentType); return { @@ -161,7 +166,7 @@ export async function scaleImageToLevel( scalableDimensions, quality ); - if (blob.size <= size) { + if (blob.size <= targetSize) { return { blob, contentType: IMAGE_JPEG, diff --git a/ts/util/uploadAttachment.ts b/ts/util/uploadAttachment.ts index e554bbab2..555510a03 100644 --- a/ts/util/uploadAttachment.ts +++ b/ts/util/uploadAttachment.ts @@ -13,18 +13,22 @@ export async function uploadAttachment( attachment: AttachmentWithHydratedData ): Promise { const keys = getRandomBytes(64); - const encrypted = padAndEncryptAttachment(attachment.data, keys); + const encrypted = padAndEncryptAttachment({ + plaintext: attachment.data, + keys, + }); const { server } = window.textsecure; strictAssert(server, 'WebAPI must be initialized'); const cdnKey = await server.putEncryptedAttachment(encrypted.ciphertext); + const size = attachment.data.byteLength; return { cdnKey, cdnNumber: 2, key: keys, - size: attachment.data.byteLength, + size, digest: encrypted.digest, contentType: MIMETypeToString(attachment.contentType),
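// The digest returned with this pointer is what receivers later pass to
// decryptAttachmentV2 as theirDigest, so it describes the exact ciphertext
// uploaded via putEncryptedAttachment above.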