chore: Refactor data import (#3434)

* Complete refactor of import

* feat: Notion data import (#3442)
Tom Moor
2022-04-23 10:07:35 -07:00
committed by GitHub
parent bdcfaae025
commit 33ce49cc33
45 changed files with 2217 additions and 1066 deletions
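The central change in this refactor is visible throughout the diffs below: loose status strings like "creating" are replaced with members of typed enums from @server/models/FileOperation. The enum definitions themselves live in one of the changed files not shown in this section; the following is a minimal sketch of their plausible shape, inferred only from the members and legacy string literals that appear in this commit (the exact string values, especially for FileOperationFormat, are assumptions):

// Hypothetical reconstruction, inferred from usages in this diff only.
export enum FileOperationType {
  Import = "import", // matches the literal used by the deleted processor below
  Export = "export", // assumed counterpart; not shown in this section
}

export enum FileOperationState {
  Creating = "creating",
  Uploading = "uploading",
  Complete = "complete",
  Error = "error",
}

export enum FileOperationFormat {
  MarkdownZip = "markdown_zip", // value assumed
  Notion = "notion", // value assumed
}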

View File

@@ -4,6 +4,7 @@ import ExportFailureEmail from "@server/emails/templates/ExportFailureEmail";
 import ExportSuccessEmail from "@server/emails/templates/ExportSuccessEmail";
 import Logger from "@server/logging/logger";
 import { FileOperation, Collection, Event, Team, User } from "@server/models";
+import { FileOperationState } from "@server/models/FileOperation";
 import { Event as TEvent } from "@server/types";
 import { uploadToS3FromBuffer } from "@server/utils/s3";
 import { archiveCollections } from "@server/utils/zip";
@@ -41,7 +42,7 @@ export default class ExportsProcessor extends BaseProcessor {
     });
     this.updateFileOperation(fileOperation, actorId, teamId, {
-      state: "creating",
+      state: FileOperationState.Creating,
     });

     // heavy lifting of creating the zip file
     Logger.info(
@@ -50,7 +51,7 @@ export default class ExportsProcessor extends BaseProcessor {
     );

     const filePath = await archiveCollections(collections);
     let url;
-    let state: any = "creating";
+    let state = FileOperationState.Creating;
     try {
       // @ts-expect-error ts-migrate(2769) FIXME: No overload matches this call.
@@ -58,7 +59,7 @@ export default class ExportsProcessor extends BaseProcessor {
       // @ts-expect-error ts-migrate(2769) FIXME: No overload matches this call.
       const stat = await fs.promises.stat(filePath);
       this.updateFileOperation(fileOperation, actorId, teamId, {
-        state: "uploading",
+        state: FileOperationState.Uploading,
         size: stat.size,
       });
       Logger.info(
@@ -75,12 +76,12 @@ export default class ExportsProcessor extends BaseProcessor {
         "processor",
         `Upload complete for file operation ${fileOperation.id}`
       );
-      state = "complete";
+      state = FileOperationState.Complete;
     } catch (error) {
       Logger.error("Error exporting collection data", error, {
         fileOperationId: fileOperation.id,
       });
-      state = "error";
+      state = FileOperationState.Error;
       url = undefined;
     } finally {
       this.updateFileOperation(fileOperation, actorId, teamId, {
@@ -88,7 +89,7 @@ export default class ExportsProcessor extends BaseProcessor {
         url,
       });

-      if (state === "error") {
+      if (state === FileOperationState.Error) {
         await ExportFailureEmail.schedule({
           to: user.email,
           teamUrl: team.url,
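Every state transition in this processor funnels through this.updateFileOperation, which is defined elsewhere in the same file and is untouched by this commit. Judging from the equivalent inline code in the deleted ImportsProcessor further down, a plausible sketch of that helper (the exact signature is an assumption):

private async updateFileOperation(
  fileOperation: FileOperation,
  actorId: string,
  teamId: string,
  data: Partial<FileOperation>
): Promise<void> {
  // persist the patch to the fileOperation row...
  await fileOperation.update(data);

  // ...then emit an event so listeners can observe progress
  await Event.schedule({
    name: "fileOperations.update",
    modelId: fileOperation.id,
    teamId,
    actorId,
  });
}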

View File

@@ -0,0 +1,40 @@
+import invariant from "invariant";
+import { FileOperation } from "@server/models";
+import {
+  FileOperationFormat,
+  FileOperationType,
+} from "@server/models/FileOperation";
+import { Event as TEvent, FileOperationEvent } from "@server/types";
+import ImportMarkdownZipTask from "../tasks/ImportMarkdownZipTask";
+import ImportNotionTask from "../tasks/ImportNotionTask";
+import BaseProcessor from "./BaseProcessor";
+
+export default class FileOperationsProcessor extends BaseProcessor {
+  static applicableEvents: TEvent["name"][] = ["fileOperations.create"];
+
+  async perform(event: FileOperationEvent) {
+    if (event.name !== "fileOperations.create") {
+      return;
+    }
+
+    const fileOperation = await FileOperation.findByPk(event.modelId);
+    invariant(fileOperation, "fileOperation not found");
+
+    // map file operation type and format to the appropriate task
+    if (fileOperation.type === FileOperationType.Import) {
+      switch (fileOperation.format) {
+        case FileOperationFormat.MarkdownZip:
+          await ImportMarkdownZipTask.schedule({
+            fileOperationId: event.modelId,
+          });
+          break;
+        case FileOperationFormat.Notion:
+          await ImportNotionTask.schedule({
+            fileOperationId: event.modelId,
+          });
+          break;
+        default:
+      }
+    }
+  }
+}
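The new processor delegates the actual import work by calling a static schedule() on each task class. The task base class is outside this section; a minimal sketch of the contract it presumably provides, assuming a Bull-style background queue (taskQueue, the job shape, and the method names here are assumptions, not the repository's actual API):

// Assumed queue interface; stands in for whatever job queue the app uses.
declare const taskQueue: {
  add(job: { name: string; props: unknown }): Promise<unknown>;
};

export default abstract class BaseTask<Props> {
  // Enqueue a job keyed by the subclass name; a worker later constructs
  // the subclass again and invokes perform(props).
  static schedule(props: unknown) {
    return taskQueue.add({ name: this.name, props });
  }

  abstract perform(props: Props): Promise<void>;
}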

View File

@@ -1,79 +0,0 @@
-import fs from "fs";
-import os from "os";
-import File from "formidable/lib/file";
-import invariant from "invariant";
-import collectionImporter from "@server/commands/collectionImporter";
-import { Event, FileOperation, Attachment, User } from "@server/models";
-import { Event as TEvent } from "@server/types";
-import BaseProcessor from "./BaseProcessor";
-
-export default class ImportsProcessor extends BaseProcessor {
-  static applicableEvents: TEvent["name"][] = ["collections.import"];
-
-  async perform(event: TEvent) {
-    switch (event.name) {
-      case "collections.import": {
-        let state, error;
-        const { type } = event.data;
-        const attachment = await Attachment.findByPk(event.modelId);
-        invariant(attachment, "attachment not found");
-
-        const user = await User.findByPk(event.actorId);
-        invariant(user, "user not found");
-
-        const fileOperation = await FileOperation.create({
-          type: "import",
-          state: "creating",
-          size: attachment.size,
-          key: attachment.key,
-          userId: user.id,
-          teamId: user.teamId,
-        });
-
-        await Event.schedule({
-          name: "fileOperations.create",
-          modelId: fileOperation.id,
-          teamId: user.teamId,
-          actorId: user.id,
-        });
-
-        try {
-          const buffer = await attachment.buffer;
-          const tmpDir = os.tmpdir();
-          const tmpFilePath = `${tmpDir}/upload-${event.modelId}`;
-          await fs.promises.writeFile(tmpFilePath, buffer as Uint8Array);
-          const file = new File({
-            name: attachment.name,
-            type: attachment.contentType,
-            path: tmpFilePath,
-          });
-
-          await collectionImporter({
-            file,
-            user,
-            type,
-            ip: event.ip,
-          });
-
-          await attachment.destroy();
-          state = "complete";
-        } catch (err) {
-          state = "error";
-          error = err.message;
-        } finally {
-          await fileOperation.update({ state, error });
-
-          await Event.schedule({
-            name: "fileOperations.update",
-            modelId: fileOperation.id,
-            teamId: user.teamId,
-            actorId: user.id,
-          });
-        }
-
-        return;
-      }
-      default:
-    }
-  }
-}
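The deleted processor above did everything inline: create the FileOperation row, write the upload to a temp file, run collectionImporter, and record the outcome. In the new design, that work moves into the format-specific tasks scheduled by FileOperationsProcessor, with the FileOperation row created upstream when the file is uploaded. A hypothetical skeleton of one such task, reconstructed from the deleted code's state handling rather than from the real ImportMarkdownZipTask:

import invariant from "invariant";
import { FileOperation } from "@server/models";
import { FileOperationState } from "@server/models/FileOperation";
import BaseTask from "./BaseTask"; // assumed location of the task base class

type Props = {
  fileOperationId: string;
};

export default class ImportMarkdownZipTask extends BaseTask<Props> {
  async perform({ fileOperationId }: Props) {
    const fileOperation = await FileOperation.findByPk(fileOperationId);
    invariant(fileOperation, "fileOperation not found");

    try {
      // ...unpack the zip and create collections/documents here...
      await fileOperation.update({ state: FileOperationState.Complete });
    } catch (error) {
      // mirror the deleted processor: record the failure on the same row
      await fileOperation.update({
        state: FileOperationState.Error,
        error: error.message,
      });
      throw error;
    }
  }
}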