refactor: Upload file to storage, and then pass attachmentId to collections.import

This avoids sending large file uploads directly to the server and allows the file to be fetched asynchronously in a worker process.
Tom Moor
2021-02-18 22:36:07 -08:00
parent 568e271738
commit df233c95a9
12 changed files with 70 additions and 54 deletions
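For context, the flow described in the commit message is: create an attachment and upload the export file straight to storage, then call collections.import with only the attachmentId so a worker process can fetch and process the file asynchronously. Below is a minimal client-side sketch in JavaScript. It assumes an attachments.create endpoint that returns presigned form fields, an upload URL, and the new attachment's id; those response field names are illustrative and not taken from this commit.

// Hypothetical client flow; field names (data.form, data.uploadUrl,
// data.attachment.id) are assumptions for illustration only.
async function importCollectionFile(file) {
  // 1. Ask the API for an attachment record and a presigned upload destination.
  const res = await fetch("/api/attachments.create", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      name: file.name,
      contentType: file.type,
      size: file.size,
    }),
  });
  const { data } = await res.json();

  // 2. Upload the file directly to storage (e.g. S3), bypassing the app server.
  const form = new FormData();
  Object.entries(data.form || {}).forEach(([key, value]) => form.append(key, value));
  form.append("file", file);
  await fetch(data.uploadUrl, { method: "POST", body: form });

  // 3. Start the import with only the attachmentId; a worker process fetches
  //    the file from storage asynchronously.
  await fetch("/api/collections.import", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ attachmentId: data.attachment.id }),
  });
}

The key point is the final step: the server receives only a small JSON payload containing the attachmentId, never the file body itself.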

@@ -9,7 +9,7 @@ import { values, keys } from "lodash";
 import uuid from "uuid";
 import { parseOutlineExport } from "../../shared/utils/zip";
 import { FileImportError } from "../errors";
-import { Attachment, Document, Collection, User } from "../models";
+import { Attachment, Event, Document, Collection, User } from "../models";
 import attachmentCreator from "./attachmentCreator";
 import documentCreator from "./documentCreator";
 import documentImporter from "./documentImporter";
@@ -66,12 +66,21 @@ export default async function collectionImporter({
     // there is also a "Name (Imported)" but this is a case not worth dealing
     // with right now
     if (!isCreated) {
+      const name = `${item.name} (Imported)`;
       collection = await Collection.create({
         teamId: user.teamId,
         creatorId: user.id,
-        name: `${item.name} (Imported)`,
+        name,
         private: false,
       });
+      await Event.create({
+        name: "collections.create",
+        collectionId: collection.id,
+        teamId: collection.teamId,
+        actorId: user.id,
+        data: { name },
+        ip,
+      });
     }
     collections[item.path] = collection;