Tom Moor
2023-01-29 06:28:57 -08:00
committed by GitHub
parent f0d9bb4898
commit 85ca25371c
9 changed files with 46 additions and 29 deletions

View File

@@ -72,9 +72,9 @@ class Attachment extends IdModel {
   }
   /**
-   * Get the contents of this attachment as a Buffer
+   * Get the contents of this attachment as a readable stream.
    */
-  get buffer() {
+  get stream() {
     return getFileByKey(this.key);
   }
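
With the getter renamed and returning a readable stream from storage, callers can pipe attachment contents instead of holding the whole file in memory. A minimal consumer sketch, not part of this commit (the handler and the Koa-style ctx shape are illustrative, and the import path is assumed):

import Attachment from "@server/models/Attachment"; // import path assumed

// Stream an attachment straight into a Koa response; Koa pipes a readable
// stream body to the client for us.
function sendAttachment(ctx: { body: unknown }, attachment: Attachment) {
  ctx.body = attachment.stream;
}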

View File

@@ -58,6 +58,9 @@ class FileOperation extends IdModel {
   @Column(DataType.BIGINT)
   size: number;
+  /**
+   * Mark the current file operation as expired and remove the file from storage.
+   */
   expire = async function () {
     this.state = "expired";
     try {
@@ -70,7 +73,10 @@ class FileOperation extends IdModel {
     await this.save();
   };
-  get buffer() {
+  /**
+   * The file operation contents as a readable stream.
+   */
+  get stream() {
     return getFileByKey(this.key);
   }

View File

@@ -55,10 +55,10 @@ export default abstract class ExportDocumentTreeTask extends ExportTask {
     await Promise.all(
       attachments.map(async (attachment) => {
         try {
-          const img = await getFileByKey(attachment.key);
+          const stream = getFileByKey(attachment.key);
           const dir = path.dirname(pathInZip);
-          if (img) {
-            zip.file(path.join(dir, attachment.key), img as Blob, {
+          if (stream) {
+            zip.file(path.join(dir, attachment.key), stream, {
               createFolders: true,
             });
           }
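
JSZip 3.x accepts a Node.js readable stream as file contents, which is why the stream returned by getFileByKey can be handed to zip.file directly. A standalone sketch of the same pattern, not taken from this commit (the file paths are made up):

import fs from "fs";
import JSZip from "jszip";

const zip = new JSZip();
// A readable stream is valid file data; JSZip consumes it while generating.
zip.file("images/photo.png", fs.createReadStream("/tmp/photo.png"), {
  createFolders: true,
});
// Write the archive back out as a stream as well.
zip
  .generateNodeStream({ streamFiles: true })
  .pipe(fs.createWriteStream("/tmp/out.zip"));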

View File

@@ -10,9 +10,9 @@ setupTestDatabase();
describe("ImportMarkdownZipTask", () => { describe("ImportMarkdownZipTask", () => {
it("should import the documents, attachments", async () => { it("should import the documents, attachments", async () => {
const fileOperation = await buildFileOperation(); const fileOperation = await buildFileOperation();
Object.defineProperty(fileOperation, "buffer", { Object.defineProperty(fileOperation, "stream", {
get() { get() {
return fs.readFileSync( return fs.createReadStream(
path.resolve(__dirname, "..", "..", "test", "fixtures", "outline.zip") path.resolve(__dirname, "..", "..", "test", "fixtures", "outline.zip")
); );
}, },
@@ -33,9 +33,9 @@ describe("ImportMarkdownZipTask", () => {
it("should throw an error with corrupt zip", async () => { it("should throw an error with corrupt zip", async () => {
const fileOperation = await buildFileOperation(); const fileOperation = await buildFileOperation();
Object.defineProperty(fileOperation, "buffer", { Object.defineProperty(fileOperation, "stream", {
get() { get() {
return fs.readFileSync( return fs.createReadStream(
path.resolve(__dirname, "..", "..", "test", "fixtures", "corrupt.zip") path.resolve(__dirname, "..", "..", "test", "fixtures", "corrupt.zip")
); );
}, },
@@ -59,9 +59,9 @@ describe("ImportMarkdownZipTask", () => {
it("should throw an error with empty collection in zip", async () => { it("should throw an error with empty collection in zip", async () => {
const fileOperation = await buildFileOperation(); const fileOperation = await buildFileOperation();
Object.defineProperty(fileOperation, "buffer", { Object.defineProperty(fileOperation, "stream", {
get() { get() {
return fs.readFileSync( return fs.createReadStream(
path.resolve(__dirname, "..", "..", "test", "fixtures", "empty.zip") path.resolve(__dirname, "..", "..", "test", "fixtures", "empty.zip")
); );
}, },
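
Each of these tests stubs the new stream getter with the same Object.defineProperty pattern, differing only in the fixture name. A hypothetical helper, not part of this commit, that would factor out the repetition (the FileOperation import path is assumed):

import fs from "fs";
import path from "path";
import FileOperation from "@server/models/FileOperation"; // import path assumed

// Replace the `stream` getter so it reads a fixture zip from disk.
function stubStreamWithFixture(fileOperation: FileOperation, fixture: string) {
  Object.defineProperty(fileOperation, "stream", {
    get() {
      return fs.createReadStream(
        path.resolve(__dirname, "..", "..", "test", "fixtures", fixture)
      );
    },
  });
}

A test would then call, for example, stubStreamWithFixture(fileOperation, "corrupt.zip") before running the task.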

View File

@@ -10,10 +10,10 @@ import ImportTask, { StructuredImportData } from "./ImportTask";
 export default class ImportMarkdownZipTask extends ImportTask {
   public async parseData(
-    buffer: Buffer,
+    stream: NodeJS.ReadableStream,
     fileOperation: FileOperation
   ): Promise<StructuredImportData> {
-    const zip = await JSZip.loadAsync(buffer);
+    const zip = await JSZip.loadAsync(stream);
     const tree = ZipHelper.toFileTree(zip);
     return this.parseFileTree({ fileOperation, zip, tree });

View File

@@ -10,9 +10,9 @@ setupTestDatabase();
describe("ImportNotionTask", () => { describe("ImportNotionTask", () => {
it("should import successfully from a Markdown export", async () => { it("should import successfully from a Markdown export", async () => {
const fileOperation = await buildFileOperation(); const fileOperation = await buildFileOperation();
Object.defineProperty(fileOperation, "buffer", { Object.defineProperty(fileOperation, "stream", {
get() { get() {
return fs.readFileSync( return fs.createReadStream(
path.resolve( path.resolve(
__dirname, __dirname,
"..", "..",
@@ -45,9 +45,9 @@ describe("ImportNotionTask", () => {
it("should import successfully from a HTML export", async () => { it("should import successfully from a HTML export", async () => {
const fileOperation = await buildFileOperation(); const fileOperation = await buildFileOperation();
Object.defineProperty(fileOperation, "buffer", { Object.defineProperty(fileOperation, "stream", {
get() { get() {
return fs.readFileSync( return fs.createReadStream(
path.resolve( path.resolve(
__dirname, __dirname,
"..", "..",

View File

@@ -11,10 +11,10 @@ import ImportTask, { StructuredImportData } from "./ImportTask";
 export default class ImportNotionTask extends ImportTask {
   public async parseData(
-    buffer: Buffer,
+    stream: NodeJS.ReadableStream,
     fileOperation: FileOperation
   ): Promise<StructuredImportData> {
-    const zip = await JSZip.loadAsync(buffer);
+    const zip = await JSZip.loadAsync(stream);
     const tree = ZipHelper.toFileTree(zip);
     return this.parseFileTree({ fileOperation, zip, tree });
   }

View File

@@ -161,15 +161,27 @@ export default abstract class ImportTask extends BaseTask<Props> {
   }
   /**
-   * Fetch the remote data needed for the import, by default this will download
-   * any file associated with the FileOperation, save it to a temporary file,
-   * and return the path.
+   * Fetch the remote data associated with the file operation as a Buffer.
    *
    * @param fileOperation The FileOperation to fetch data for
-   * @returns string
+   * @returns A promise that resolves to the data as a buffer.
    */
-  protected async fetchData(fileOperation: FileOperation) {
-    return fileOperation.buffer;
+  protected async fetchData(fileOperation: FileOperation): Promise<Buffer> {
+    return new Promise((resolve, reject) => {
+      const bufs: Buffer[] = [];
+      const stream = fileOperation.stream;
+      if (!stream) {
+        return reject(new Error("No stream available"));
+      }
+      stream.on("data", function (d) {
+        bufs.push(d);
+      });
+      stream.on("error", reject);
+      stream.on("end", () => {
+        resolve(Buffer.concat(bufs));
+      });
+    });
   }
   /**
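
fetchData now drains the stream into memory by collecting "data" chunks and concatenating them once "end" fires. An equivalent sketch, not from this commit, using the fact that Node readable streams are async iterable:

// Same buffering expressed with async iteration instead of event listeners.
async function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
  const chunks: Buffer[] = [];
  for await (const chunk of stream) {
    // Chunks arrive as Buffers unless an encoding was set; normalize either way.
    chunks.push(typeof chunk === "string" ? Buffer.from(chunk) : chunk);
  }
  return Buffer.concat(chunks);
}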

View File

@@ -185,15 +185,14 @@ export const getAWSKeyForFileOp = (teamId: string, name: string) => {
   return `${bucket}/${teamId}/${uuidv4()}/${name}-export.zip`;
 };
-export const getFileByKey = async (key: string) => {
+export const getFileByKey = (key: string) => {
   const params = {
     Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
     Key: key,
   };
   try {
-    const data = await s3.getObject(params).promise();
-    return data.Body || null;
+    return s3.getObject(params).createReadStream();
   } catch (err) {
     Logger.error("Error getting file from S3 by key", err, {
       key,
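
Note that with createReadStream the try/catch around the S3 call only covers synchronous setup: failures such as a missing key or denied access are emitted as "error" events on the returned stream rather than surfacing as a rejected promise inside getFileByKey. A consumer sketch, not part of this commit (the key is illustrative):

const stream = getFileByKey("uploads/example-key.zip");
if (stream) {
  // Handle S3 errors (e.g. NoSuchKey, AccessDenied) surfaced on the stream.
  stream.on("error", (err) => {
    console.error("Failed to read file from S3", err);
  });
}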