fix: Protect against exports larger than memory/max

This commit is contained in:
Tom Moor
2023-09-02 22:11:53 -04:00
parent 0054b7152e
commit 093ee74a90
3 changed files with 65 additions and 1 deletion

View File

@@ -5,6 +5,7 @@ require("dotenv").config({
silent: true, silent: true,
}); });
import os from "os";
import { import {
validate, validate,
IsNotEmpty, IsNotEmpty,
@@ -622,6 +623,14 @@ export class Environment {
this.AWS_S3_UPLOAD_MAX_SIZE this.AWS_S3_UPLOAD_MAX_SIZE
); );
/**
* Limit on export size in bytes. Defaults to the total memory available to
* the container.
*/
@IsNumber()
public MAXIMUM_EXPORT_SIZE =
this.toOptionalNumber(process.env.MAXIMUM_EXPORT_SIZE) ?? os.totalmem();
/** /**
* Iframely url * Iframely url
*/ */

View File

@@ -1,4 +1,5 @@
import path from "path"; import path from "path";
import { QueryTypes } from "sequelize";
import { import {
BeforeDestroy, BeforeDestroy,
BelongsTo, BelongsTo,
@@ -116,6 +117,30 @@ class Attachment extends IdModel {
await FileStorage.deleteFile(model.key); await FileStorage.deleteFile(model.key);
} }
// static methods
/**
 * Compute the combined size, in bytes, of every attachment belonging to a team.
 *
 * @param teamId - The ID of the team whose attachments should be summed.
 * @returns A promise resolving to the total attachment size in bytes (0 when
 *   the team has no attachments).
 */
static async getTotalSizeForTeam(teamId: string): Promise<number> {
  // Raw aggregate query — SUM(size) yields NULL when no rows match, which is
  // normalized to 0 below.
  const rows = await this.sequelize!.query<{ total: string }>(
    `
      SELECT SUM(size) as total
      FROM attachments
      WHERE "teamId" = :teamId
    `,
    {
      type: QueryTypes.SELECT,
      replacements: { teamId },
    }
  );
  const total = rows?.[0]?.total;
  return total == null ? 0 : parseInt(total, 10);
}
// associations // associations
@BelongsTo(() => Team, "teamId") @BelongsTo(() => Team, "teamId")

View File

@@ -1,10 +1,20 @@
import fs from "fs"; import fs from "fs";
import truncate from "lodash/truncate"; import truncate from "lodash/truncate";
import { FileOperationState, NotificationEventType } from "@shared/types"; import { FileOperationState, NotificationEventType } from "@shared/types";
import { bytesToHumanReadable } from "@shared/utils/files";
import ExportFailureEmail from "@server/emails/templates/ExportFailureEmail"; import ExportFailureEmail from "@server/emails/templates/ExportFailureEmail";
import ExportSuccessEmail from "@server/emails/templates/ExportSuccessEmail"; import ExportSuccessEmail from "@server/emails/templates/ExportSuccessEmail";
import env from "@server/env";
import { ValidationError } from "@server/errors";
import Logger from "@server/logging/Logger"; import Logger from "@server/logging/Logger";
import { Collection, Event, FileOperation, Team, User } from "@server/models"; import {
Attachment,
Collection,
Event,
FileOperation,
Team,
User,
} from "@server/models";
import fileOperationPresenter from "@server/presenters/fileOperation"; import fileOperationPresenter from "@server/presenters/fileOperation";
import FileStorage from "@server/storage/files"; import FileStorage from "@server/storage/files";
import BaseTask, { TaskPriority } from "./BaseTask"; import BaseTask, { TaskPriority } from "./BaseTask";
@@ -43,6 +53,26 @@ export default abstract class ExportTask extends BaseTask<Props> {
let filePath: string | undefined; let filePath: string | undefined;
try { try {
if (!fileOperation.collectionId) {
const totalAttachmentsSize = await Attachment.getTotalSizeForTeam(
user.teamId
);
if (
fileOperation.includeAttachments &&
env.MAXIMUM_EXPORT_SIZE &&
totalAttachmentsSize > env.MAXIMUM_EXPORT_SIZE
) {
throw ValidationError(
`${bytesToHumanReadable(
totalAttachmentsSize
)} of attachments in workspace is larger than maximum export size of ${bytesToHumanReadable(
env.MAXIMUM_EXPORT_SIZE
)}.`
);
}
}
Logger.info("task", `ExportTask processing data for ${fileOperationId}`, { Logger.info("task", `ExportTask processing data for ${fileOperationId}`, {
includeAttachments: fileOperation.includeAttachments, includeAttachments: fileOperation.includeAttachments,
}); });