fix: Refactor attachment downloads during export to use promises (#5294)
* Refactor attachment downloads during export to use promises instead of streams
* Date attachments in zip file correctly
* tsc
This commit is contained in:
@@ -13,8 +13,9 @@ import {
|
||||
import {
|
||||
publicS3Endpoint,
|
||||
deleteFromS3,
|
||||
getFileByKey,
|
||||
getFileStream,
|
||||
getSignedUrl,
|
||||
getFileBuffer,
|
||||
} from "@server/utils/s3";
|
||||
import Document from "./Document";
|
||||
import Team from "./Team";
|
||||
@@ -75,7 +76,14 @@ class Attachment extends IdModel {
|
||||
* Get the contents of this attachment as a readable stream.
|
||||
*/
|
||||
get stream() {
|
||||
return getFileByKey(this.key);
|
||||
return getFileStream(this.key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the contents of this attachment as a buffer.
|
||||
*/
|
||||
get buffer() {
|
||||
return getFileBuffer(this.key);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -13,7 +13,7 @@ import {
|
||||
FileOperationState,
|
||||
FileOperationType,
|
||||
} from "@shared/types";
|
||||
import { deleteFromS3, getFileByKey } from "@server/utils/s3";
|
||||
import { deleteFromS3, getFileStream } from "@server/utils/s3";
|
||||
import Collection from "./Collection";
|
||||
import Team from "./Team";
|
||||
import User from "./User";
|
||||
@@ -77,7 +77,7 @@ class FileOperation extends IdModel {
|
||||
* The file operation contents as a readable stream.
|
||||
*/
|
||||
get stream() {
|
||||
return getFileByKey(this.key);
|
||||
return getFileStream(this.key);
|
||||
}
|
||||
|
||||
// hooks
|
||||
|
||||
@@ -9,7 +9,6 @@ import DocumentHelper from "@server/models/helpers/DocumentHelper";
|
||||
import ZipHelper from "@server/utils/ZipHelper";
|
||||
import { serializeFilename } from "@server/utils/fs";
|
||||
import parseAttachmentIds from "@server/utils/parseAttachmentIds";
|
||||
import { getFileByKey } from "@server/utils/s3";
|
||||
import ExportTask from "./ExportTask";
|
||||
|
||||
export default abstract class ExportDocumentTreeTask extends ExportTask {
|
||||
@@ -65,13 +64,11 @@ export default abstract class ExportDocumentTreeTask extends ExportTask {
|
||||
key: attachment.key,
|
||||
});
|
||||
|
||||
const stream = getFileByKey(attachment.key);
|
||||
const dir = path.dirname(pathInZip);
|
||||
if (stream) {
|
||||
zip.file(path.join(dir, attachment.key), stream, {
|
||||
createFolders: true,
|
||||
});
|
||||
}
|
||||
zip.file(path.join(dir, attachment.key), attachment.buffer, {
|
||||
date: attachment.updatedAt,
|
||||
createFolders: true,
|
||||
});
|
||||
} catch (err) {
|
||||
Logger.error(
|
||||
`Failed to add attachment to archive: ${attachment.key}`,
|
||||
|
||||
@@ -16,7 +16,6 @@ import { CollectionJSONExport, JSONExportMetadata } from "@server/types";
|
||||
import ZipHelper from "@server/utils/ZipHelper";
|
||||
import { serializeFilename } from "@server/utils/fs";
|
||||
import parseAttachmentIds from "@server/utils/parseAttachmentIds";
|
||||
import { getFileByKey } from "@server/utils/s3";
|
||||
import packageJson from "../../../package.json";
|
||||
import ExportTask from "./ExportTask";
|
||||
|
||||
@@ -86,12 +85,10 @@ export default class ExportJSONTask extends ExportTask {
|
||||
await Promise.all(
|
||||
attachments.map(async (attachment) => {
|
||||
try {
|
||||
const stream = getFileByKey(attachment.key);
|
||||
if (stream) {
|
||||
zip.file(attachment.key, stream, {
|
||||
createFolders: true,
|
||||
});
|
||||
}
|
||||
zip.file(attachment.key, attachment.buffer, {
|
||||
date: attachment.updatedAt,
|
||||
createFolders: true,
|
||||
});
|
||||
|
||||
output.attachments[attachment.id] = {
|
||||
...omit(presentAttachment(attachment), "url"),
|
||||
|
||||
@@ -3,6 +3,7 @@ import path from "path";
|
||||
import JSZip from "jszip";
|
||||
import { find } from "lodash";
|
||||
import tmp from "tmp";
|
||||
import { bytesToHumanReadable } from "@shared/utils/files";
|
||||
import { ValidationError } from "@server/errors";
|
||||
import Logger from "@server/logging/Logger";
|
||||
import { trace } from "@server/logging/tracing";
|
||||
@@ -114,10 +115,14 @@ export default class ZipHelper {
|
||||
currentFile: metadata.currentFile,
|
||||
percent,
|
||||
};
|
||||
const memory = process.memoryUsage();
|
||||
Logger.debug(
|
||||
"utils",
|
||||
`Writing zip file progress… %${percent}`,
|
||||
{ currentFile: metadata.currentFile }
|
||||
`Writing zip file progress… ${percent}%`,
|
||||
{
|
||||
currentFile: metadata.currentFile,
|
||||
memory: bytesToHumanReadable(memory.rss),
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -184,19 +184,34 @@ export const getAWSKeyForFileOp = (teamId: string, name: string) => {
|
||||
return `${bucket}/${teamId}/${uuidv4()}/${name}-export.zip`;
|
||||
};
|
||||
|
||||
export const getFileByKey = (key: string) => {
|
||||
const params = {
|
||||
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
|
||||
Key: key,
|
||||
};
|
||||
|
||||
export const getFileStream = (key: string) => {
|
||||
try {
|
||||
return s3.getObject(params).createReadStream();
|
||||
return s3
|
||||
.getObject({
|
||||
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
|
||||
Key: key,
|
||||
})
|
||||
.createReadStream();
|
||||
} catch (err) {
|
||||
Logger.error("Error getting file from S3 by key", err, {
|
||||
Logger.error("Error getting file stream from S3 ", err, {
|
||||
key,
|
||||
});
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
export const getFileBuffer = async (key: string) => {
|
||||
const response = await s3
|
||||
.getObject({
|
||||
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
|
||||
Key: key,
|
||||
})
|
||||
.promise();
|
||||
|
||||
if (response.Body) {
|
||||
return response.Body as Blob;
|
||||
}
|
||||
|
||||
throw new Error("Error getting file buffer from S3");
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user