feat: Import improvements (#3064)
* feat: Split and simplify import/export pages in prep for more options
* minor fixes
* File operations for imports
* test
* icons
@@ -5,9 +5,9 @@ import File from "formidable/lib/file";
 import invariant from "invariant";
 import { values, keys } from "lodash";
 import { v4 as uuidv4 } from "uuid";
-import { parseOutlineExport } from "@shared/utils/zip";
 import Logger from "@server/logging/logger";
 import { Attachment, Event, Document, Collection, User } from "@server/models";
+import { parseOutlineExport, Item } from "@server/utils/zip";
 import { FileImportError } from "../errors";
 import attachmentCreator from "./attachmentCreator";
 import documentCreator from "./documentCreator";
@@ -30,10 +30,10 @@ export default async function collectionImporter({
 }) {
   // load the zip structure into memory
   const zipData = await fs.promises.readFile(file.path);
-  let items;
+  let items: Item[];

   try {
-    items = await await parseOutlineExport(zipData);
+    items = await parseOutlineExport(zipData);
   } catch (err) {
     throw FileImportError(err.message);
   }
@@ -2,14 +2,14 @@ import { sequelize } from "@server/database/sequelize";
 import { FileOperation, Event, User } from "@server/models";

 export default async function fileOperationDeleter(
-  fileOp: FileOperation,
+  fileOperation: FileOperation,
   user: User,
   ip: string
 ) {
   const transaction = await sequelize.transaction();

   try {
-    await fileOp.destroy({
+    await fileOperation.destroy({
       transaction,
     });
     await Event.create(
@@ -17,8 +17,7 @@ export default async function fileOperationDeleter(
         name: "fileOperations.delete",
         teamId: user.teamId,
         actorId: user.id,
-        // @ts-expect-error dataValues does exist
-        data: fileOp.dataValues,
+        modelId: fileOperation.id,
         ip,
       },
       {
@@ -0,0 +1,13 @@
+"use strict";
+
+module.exports = {
+  up: async (queryInterface, Sequelize) => {
+    await queryInterface.addColumn("file_operations", "error", {
+      type: Sequelize.STRING,
+      allowNull: true,
+    });
+  },
+  down: async (queryInterface) => {
+    await queryInterface.removeColumn("file_operations", "error");
+  },
+};
@@ -45,6 +45,9 @@ class FileOperation extends BaseModel {
   @Column
   url: string;

+  @Column
+  error: string | null;
+
   @Column(DataType.BIGINT)
   size: number;

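The new nullable `error` column (added by the migration above) lets callers read back why an operation failed. A minimal sketch of such a lookup, not part of this commit:

import { FileOperation } from "@server/models";

// sketch only: list a team's failed operations and surface the stored message
async function listFailedFileOperations(teamId: string) {
  const failed = await FileOperation.findAll({
    where: { teamId, state: "error" },
  });
  return failed.map((operation) => ({
    id: operation.id,
    error: operation.error ?? "unknown error",
  }));
}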
@@ -1,13 +1,16 @@
+import path from "path";
 import { FileOperation } from "@server/models";
-import { presentCollection, presentUser } from ".";
+import { presentUser } from ".";

 export default function present(data: FileOperation) {
   return {
     id: data.id,
     type: data.type,
+    name: data.collection?.name || path.basename(data.key || ""),
     state: data.state,
-    collection: data.collection ? presentCollection(data.collection) : null,
+    error: data.error,
     size: data.size,
+    collectionId: data.collectionId,
     user: presentUser(data.user),
     createdAt: data.createdAt,
   };
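The presented `name` now falls back to the basename of the storage key when no collection is attached, which is the common case for imports. A tiny illustration of that fallback; the key is a made-up value:

import path from "path";

// with no collection, the presented name comes from the key's basename
const key = "uploads/import-archive.zip"; // hypothetical stored key
const name = path.basename(key || "");
// name === "import-archive.zip"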
@@ -19,60 +19,62 @@ export default class ExportsProcessor {
         const user = await User.findByPk(actorId);
         invariant(user, "user operation not found");

-        const exportData = await FileOperation.findByPk(event.modelId);
-        invariant(exportData, "exportData not found");
+        const fileOperation = await FileOperation.findByPk(event.modelId);
+        invariant(fileOperation, "fileOperation not found");

         const collectionIds =
-          // @ts-expect-error ts-migrate(2339) FIXME: Property 'collectionId' does not exist on type 'Co... Remove this comment to see the full error message
-          event.collectionId || (await user.collectionIds());
+          "collectionId" in event
+            ? event.collectionId
+            : await user.collectionIds();
         const collections = await Collection.findAll({
           where: {
             id: collectionIds,
           },
         });
-        this.updateFileOperation(exportData, actorId, teamId, {
+        this.updateFileOperation(fileOperation, actorId, teamId, {
           state: "creating",
         });
         // heavy lifting of creating the zip file
         Logger.info(
           "processor",
-          `Archiving collections for file operation ${exportData.id}`
+          `Archiving collections for file operation ${fileOperation.id}`
         );
         const filePath = await archiveCollections(collections);
-        let url, state;
+        let url;
+        let state: any = "creating";

         try {
           // @ts-expect-error ts-migrate(2769) FIXME: No overload matches this call.
           const readBuffer = await fs.promises.readFile(filePath);
           // @ts-expect-error ts-migrate(2769) FIXME: No overload matches this call.
           const stat = await fs.promises.stat(filePath);
-          this.updateFileOperation(exportData, actorId, teamId, {
+          this.updateFileOperation(fileOperation, actorId, teamId, {
             state: "uploading",
             size: stat.size,
           });
           Logger.info(
             "processor",
-            `Uploading archive for file operation ${exportData.id}`
+            `Uploading archive for file operation ${fileOperation.id}`
           );
           url = await uploadToS3FromBuffer(
             readBuffer,
             "application/zip",
-            exportData.key,
+            fileOperation.key,
             "private"
           );
           Logger.info(
             "processor",
-            `Upload complete for file operation ${exportData.id}`
+            `Upload complete for file operation ${fileOperation.id}`
           );
           state = "complete";
         } catch (error) {
           Logger.error("Error exporting collection data", error, {
-            fileOperationId: exportData.id,
+            fileOperationId: fileOperation.id,
           });
           state = "error";
-          url = null;
+          url = undefined;
         } finally {
-          this.updateFileOperation(exportData, actorId, teamId, {
+          this.updateFileOperation(fileOperation, actorId, teamId, {
             state,
             url,
           });
@@ -85,7 +87,7 @@ export default class ExportsProcessor {
         } else {
           mailer.sendTemplate("exportSuccess", {
             to: user.email,
-            id: exportData.id,
+            id: fileOperation.id,
             teamUrl: team.url,
           });
         }
@@ -101,15 +103,14 @@ export default class ExportsProcessor {
     fileOperation: FileOperation,
     actorId: string,
     teamId: string,
-    data: Record<string, any>
+    data: Partial<FileOperation>
   ) {
     await fileOperation.update(data);
     await Event.add({
       name: "fileOperations.update",
       teamId,
       actorId,
-      // @ts-expect-error dataValues exists
       data: fileOperation.dataValues,
       modelId: fileOperation.id,
     });
   }
 }
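Narrowing the update payload from `Record<string, any>` to `Partial<FileOperation>` means call sites such as the ones above are now checked against the model's fields. A rough illustration, assuming the fields declared on the model earlier in this diff:

import { FileOperation } from "@server/models";

// compiles: `state` and `size` are fields on FileOperation
const payload: Partial<FileOperation> = { state: "uploading", size: 1024 };

// would no longer compile: unknown property, previously accepted silently
// const typo: Partial<FileOperation> = { stale: "uploading" };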
@@ -3,13 +3,14 @@ import os from "os";
 import File from "formidable/lib/file";
 import invariant from "invariant";
 import collectionImporter from "@server/commands/collectionImporter";
-import { Attachment, User } from "@server/models";
-import { Event } from "../../types";
+import { Event, FileOperation, Attachment, User } from "@server/models";
+import { Event as TEvent } from "../../types";

 export default class ImportsProcessor {
-  async on(event: Event) {
+  async on(event: TEvent) {
     switch (event.name) {
       case "collections.import": {
+        let state, error;
         const { type } = event.data;
         const attachment = await Attachment.findByPk(event.modelId);
         invariant(attachment, "attachment not found");
@@ -17,22 +18,55 @@ export default class ImportsProcessor {
         const user = await User.findByPk(event.actorId);
         invariant(user, "user not found");

-        const buffer: any = await attachment.buffer;
-        const tmpDir = os.tmpdir();
-        const tmpFilePath = `${tmpDir}/upload-${event.modelId}`;
-        await fs.promises.writeFile(tmpFilePath, buffer);
-        const file = new File({
-          name: attachment.name,
-          type: attachment.contentType,
-          path: tmpFilePath,
+        const fileOperation = await FileOperation.create({
+          type: "import",
+          state: "creating",
+          size: attachment.size,
+          key: attachment.key,
+          userId: user.id,
+          teamId: user.teamId,
         });
-        await collectionImporter({
-          file,
-          user,
-          type,
-          ip: event.ip,
+
+        await Event.add({
+          name: "fileOperations.create",
+          modelId: fileOperation.id,
+          teamId: user.teamId,
+          actorId: user.id,
         });
-        await attachment.destroy();
+
+        try {
+          const buffer = await attachment.buffer;
+          const tmpDir = os.tmpdir();
+          const tmpFilePath = `${tmpDir}/upload-${event.modelId}`;
+          await fs.promises.writeFile(tmpFilePath, buffer as Uint8Array);
+          const file = new File({
+            name: attachment.name,
+            type: attachment.contentType,
+            path: tmpFilePath,
+          });
+
+          await collectionImporter({
+            file,
+            user,
+            type,
+            ip: event.ip,
+          });
+          await attachment.destroy();
+
+          state = "complete";
+        } catch (err) {
+          state = "error";
+          error = err.message;
+        } finally {
+          await fileOperation.update({ state, error });
+          await Event.add({
+            name: "fileOperations.update",
+            modelId: fileOperation.id,
+            teamId: user.teamId,
+            actorId: user.id,
+          });
+        }

         return;
       }
@@ -3,13 +3,18 @@ import { Op } from "sequelize";
 import {
   Document,
   Collection,
+  FileOperation,
   Group,
   CollectionGroup,
   GroupUser,
   Pin,
   Star,
 } from "@server/models";
-import { presentPin, presentStar } from "@server/presenters";
+import {
+  presentFileOperation,
+  presentPin,
+  presentStar,
+} from "@server/presenters";
 import { Event } from "../../types";

 export default class WebsocketsProcessor {
@@ -354,10 +359,14 @@ export default class WebsocketsProcessor {
         return;
       }

+      case "fileOperations.create":
       case "fileOperations.update": {
-        return socketio
-          .to(`user-${event.actorId}`)
-          .emit("fileOperations.update", event.data);
+        const fileOperation = await FileOperation.findByPk(event.modelId);
+        if (!fileOperation) {
+          return;
+        }
+        const data = await presentFileOperation(fileOperation);
+        return socketio.to(`user-${event.actorId}`).emit(event.name, data);
       }

       case "pins.create":
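Since both event names now emit the presented record to the actor's user room, a client can subscribe to them directly. A minimal sketch using a plain socket.io client; the connection details are placeholders and Outline's real frontend wraps this differently:

import { io } from "socket.io-client";

// hypothetical connection; the event names and payload shape come from the
// processor and presenter in this diff
const socket = io("https://example.getoutline.com", {
  transports: ["websocket"],
});

socket.on("fileOperations.create", (fileOperation) => {
  console.log(`started ${fileOperation.type}: ${fileOperation.name}`);
});

socket.on("fileOperations.update", (fileOperation) => {
  if (fileOperation.state === "error") {
    console.warn(`file operation failed: ${fileOperation.error}`);
  }
});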
@@ -118,7 +118,7 @@ describe("#fileOperations.list", () => {
     expect(data.id).toBe(exportData.id);
     expect(data.key).toBe(undefined);
     expect(data.state).toBe(exportData.state);
-    expect(data.collection.id).toBe(collection.id);
+    expect(data.collectionId).toBe(collection.id);
   });

   it("should return exports with collection data even if collection is deleted", async () => {
@@ -152,7 +152,7 @@ describe("#fileOperations.list", () => {
     expect(data.id).toBe(exportData.id);
     expect(data.key).toBe(undefined);
     expect(data.state).toBe(exportData.state);
-    expect(data.collection.id).toBe(collection.id);
+    expect(data.collectionId).toBe(collection.id);
   });

   it("should return exports with user data even if user is deleted", async () => {
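Both tests exercise the list endpoint, whose entries now carry the flat `collectionId` alongside the presented collection fields. A rough sketch of calling it over HTTP, assuming Outline's RPC-style POST API and a valid API token; the `type` filter is a guess based on the import/export split:

// sketch only: request parameters other than the auth header are assumptions
const response = await fetch("https://app.example.com/api/fileOperations.list", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${process.env.OUTLINE_API_TOKEN}`,
  },
  body: JSON.stringify({ type: "export" }),
});
const { data } = await response.json();
// each entry exposes id, name, state, error, size and collectionId (see presenter above)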
@@ -130,9 +130,13 @@ export type CollectionExportAllEvent = {
 };

 export type FileOperationEvent = {
-  name: "fileOperations.update" | "fileOperation.delete";
+  name:
+    | "fileOperations.create"
+    | "fileOperations.update"
+    | "fileOperation.delete";
   teamId: string;
   actorId: string;
   modelId: string;
   data: {
     type: string;
     state: string;
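With the widened `name` union, a handler can switch on the literal and get an exhaustiveness check from the compiler. A small sketch using only the names declared above:

function describeFileOperationEvent(event: FileOperationEvent): string {
  switch (event.name) {
    case "fileOperations.create":
      return "file operation created";
    case "fileOperations.update":
      return "file operation updated";
    case "fileOperation.delete":
      return "file operation deleted";
    default: {
      // fails to compile if another literal is added to the union
      const exhaustive: never = event.name;
      return exhaustive;
    }
  }
}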
@@ -1,5 +1,6 @@
 import fs from "fs";
-import JSZip from "jszip";
+import path from "path";
+import JSZip, { JSZipObject } from "jszip";
 import tmp from "tmp";
 import Logger from "@server/logging/logger";
 import Attachment from "@server/models/Attachment";
@@ -10,6 +11,18 @@ import { serializeFilename } from "./fs";
 import parseAttachmentIds from "./parseAttachmentIds";
 import { getFileByKey } from "./s3";

+type ItemType = "collection" | "document" | "attachment";
+
+export type Item = {
+  path: string;
+  dir: string;
+  name: string;
+  depth: number;
+  metadata: Record<string, any>;
+  type: ItemType;
+  item: JSZipObject;
+};
+
 async function addToArchive(zip: JSZip, documents: NavigationNode[]) {
   for (const doc of documents) {
     const document = await Document.findByPk(doc.id);
@@ -104,3 +117,78 @@ export async function archiveCollections(collections: Collection[]) {

   return archiveToPath(zip);
 }
+
+export async function parseOutlineExport(
+  input: File | Buffer
+): Promise<Item[]> {
+  const zip = await JSZip.loadAsync(input);
+  // this is so we can use async / await a little easier
+  const items: Item[] = [];
+
+  for (const rawPath in zip.files) {
+    const item = zip.files[rawPath];
+
+    if (!item) {
+      throw new Error(
+        `No item at ${rawPath} in zip file. This zip file might be corrupt.`
+      );
+    }
+
+    const itemPath = rawPath.replace(/\/$/, "");
+    const dir = path.dirname(itemPath);
+    const name = path.basename(item.name);
+    const depth = itemPath.split("/").length - 1;
+
+    // known skippable items
+    if (itemPath.startsWith("__MACOSX") || itemPath.endsWith(".DS_Store")) {
+      continue;
+    }
+
+    // attempt to parse extra metadata from zip comment
+    let metadata = {};
+
+    try {
+      metadata = item.comment ? JSON.parse(item.comment) : {};
+    } catch (err) {
+      console.log(
+        `ZIP comment found for ${item.name}, but could not be parsed as metadata: ${item.comment}`
+      );
+    }
+
+    if (depth === 0 && !item.dir) {
+      throw new Error(
+        "Root of zip file must only contain folders representing collections"
+      );
+    }
+
+    let type: ItemType | undefined;
+
+    if (depth === 0 && item.dir && name) {
+      type = "collection";
+    }
+
+    if (depth > 0 && !item.dir && item.name.endsWith(".md")) {
+      type = "document";
+    }
+
+    if (depth > 0 && !item.dir && itemPath.includes("uploads")) {
+      type = "attachment";
+    }
+
+    if (!type) {
+      continue;
+    }
+
+    items.push({
+      path: itemPath,
+      dir,
+      name,
+      depth,
+      type,
+      metadata,
+      item,
+    });
+  }
+
+  return items;
+}
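A short usage sketch of the new helper: read a zip from disk, parse it, and bucket the entries by their detected type, which mirrors how collectionImporter consumes the result. The file path argument is a placeholder:

import fs from "fs";
import { parseOutlineExport, Item } from "@server/utils/zip";

async function inspectExport(zipPath: string) {
  const zipData = await fs.promises.readFile(zipPath);
  const items: Item[] = await parseOutlineExport(zipData);

  // group parsed entries by type: "collection" | "document" | "attachment"
  const byType: Record<string, Item[]> = {};
  for (const item of items) {
    (byType[item.type] = byType[item.type] || []).push(item);
  }
  return byType;
}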