+      <HelpText>
+        <Trans>
+          It is possible to import a zip file of folders and Markdown files
+          previously exported from an Outline instance. Support will soon be
+          added for importing from other services.
+        </Trans>
+      </HelpText>
+      {isImported && (
+        <Notice>
+          <Trans>
+            Your file has been uploaded and the import is currently being
+            processed, you can safely leave this page while it completes.
+          </Trans>
+        </Notice>
+      )}
+      {file && !isImportable && (
+        <ImportPreview>
+          <Trans
+            defaults="Sorry, the file {{ fileName }} is missing valid collections or documents."
+            values={{
+              fileName: file.name,
+            }}
+          />
+        </ImportPreview>
+      )}
+      {file && importDetails && isImportable ? (
+        <>
+          <ImportPreview>
+            <Trans
+              defaults="{{ fileName }} looks good, the following collections and their documents will be imported:"
+              values={{
+                fileName: file.name,
+              }}
+            />
+            <List>
+              {importDetails
+                .filter((detail) => detail.type === "collection")
+                .map((detail) => (
+                  <ImportPreviewItem key={detail.path}>
+                    <CollectionIcon />
+                    <CollectionName>{detail.name}</CollectionName>
+                  </ImportPreviewItem>
+                ))}
+            </List>
+          </ImportPreview>
+          <Button type="submit" onClick={handleImport} disabled={isImporting}>
+            {isImporting ? `${t("Uploading")}…` : t("Confirm & Import")}
+          </Button>
+        </>
+      ) : (
+        <Button type="submit" onClick={handlePickFile}>
+          {t("Choose File")}…
+        </Button>
+      )}
+
+      <h2>{t("Export")}</h2>
+      <HelpText>
+        <Trans
+          defaults="A full export might take some time, consider exporting a single document or collection if possible. We’ll put together a zip of all your documents in Markdown format and email it to {{ userEmail }}."
+          values={{
+            userEmail: user.email,
+          }}
+        />
+      </HelpText>
+      <Button type="submit" onClick={handleExport} disabled={isExporting}>
+        {isExporting ? `${t("Requesting Export")}…` : t("Export Data")}
+      </Button>
+    </CenteredContent>
+  );
+}
+
+const List = styled.ul`
+ padding: 0;
+ margin: 8px 0 0;
+`;
+
+const ImportPreview = styled(Notice)`
+ margin-bottom: 16px;
+`;
+
+const ImportPreviewItem = styled.li`
+ display: flex;
+ align-items: center;
+ list-style: none;
+`;
+
+const CollectionName = styled.span`
+ font-weight: 500;
+ margin-left: 4px;
+`;
+
+export default observer(ImportExport);
diff --git a/app/stores/CollectionsStore.js b/app/stores/CollectionsStore.js
index df29d61e4..6f9c786a5 100644
--- a/app/stores/CollectionsStore.js
+++ b/app/stores/CollectionsStore.js
@@ -1,7 +1,6 @@
// @flow
import { concat, filter, last } from "lodash";
import { computed, action } from "mobx";
-
import naturalSort from "shared/utils/naturalSort";
import Collection from "models/Collection";
import BaseStore from "./BaseStore";
@@ -89,6 +88,13 @@ export default class CollectionsStore extends BaseStore {
}
@action
+ import = async (attachmentId: string) => {
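+ // the attachment has already been uploaded; this call only queues the
+ // import, which the server processes asynchronously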
+ await client.post("/collections.import", {
+ type: "outline",
+ attachmentId,
+ });
+ };
+
async update(params: Object): Promise<Collection> {
const result = await super.update(params);
@@ -116,12 +122,12 @@ export default class CollectionsStore extends BaseStore {
if (path) return path.title;
}
- delete(collection: Collection) {
- super.delete(collection);
+ delete = async (collection: Collection) => {
+ await super.delete(collection);
this.rootStore.documents.fetchRecentlyUpdated();
this.rootStore.documents.fetchRecentlyViewed();
- }
+ };
export = () => {
return client.post("/collections.export_all");
diff --git a/app/utils/uploadFile.js b/app/utils/uploadFile.js
index 09ea37567..8cd7384e2 100644
--- a/app/utils/uploadFile.js
+++ b/app/utils/uploadFile.js
@@ -39,11 +39,13 @@ export const uploadFile = async (
formData.append("file", file);
}
- await fetch(data.uploadUrl, {
+ const uploadResponse = await fetch(data.uploadUrl, {
method: "post",
body: formData,
});
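+ // fetch does not reject on non-2xx responses, so check the status
+ // explicitly and surface upload failures to the caller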
+ invariant(uploadResponse.ok, "Upload failed, try again?");
+
return attachment;
};
diff --git a/server/api/__snapshots__/collections.test.js.snap b/server/api/__snapshots__/collections.test.js.snap
index 95fec2a7c..b3aaf516a 100644
--- a/server/api/__snapshots__/collections.test.js.snap
+++ b/server/api/__snapshots__/collections.test.js.snap
@@ -61,6 +61,15 @@ Object {
}
`;
+exports[`#collections.import should require authentication 1`] = `
+Object {
+ "error": "authentication_required",
+ "message": "Authentication required",
+ "ok": false,
+ "status": 401,
+}
+`;
+
exports[`#collections.info should require authentication 1`] = `
Object {
"error": "authentication_required",
diff --git a/server/api/attachments.js b/server/api/attachments.js
index a2d34d547..0cc3b3770 100644
--- a/server/api/attachments.js
+++ b/server/api/attachments.js
@@ -38,7 +38,7 @@ router.post("attachments.create", auth(), async (ctx) => {
const key = `${bucket}/${user.id}/${s3Key}/${name}`;
const credential = makeCredential();
const longDate = format(new Date(), "YYYYMMDDTHHmmss\\Z");
- const policy = makePolicy(credential, longDate, acl);
+ const policy = makePolicy(credential, longDate, acl, contentType);
const endpoint = publicS3Endpoint();
const url = `${endpoint}/${key}`;
@@ -85,6 +85,7 @@ router.post("attachments.create", auth(), async (ctx) => {
documentId,
contentType,
name,
+ id: attachment.id,
url: attachment.redirectUrl,
size,
},
diff --git a/server/api/collections.js b/server/api/collections.js
index 900a83c04..bb3997a16 100644
--- a/server/api/collections.js
+++ b/server/api/collections.js
@@ -12,6 +12,7 @@ import {
Event,
User,
Group,
+ Attachment,
} from "../models";
import policy from "../policies";
import {
@@ -98,6 +99,31 @@ router.post("collections.info", auth(), async (ctx) => {
};
});
+router.post("collections.import", auth(), async (ctx) => {
+ const { type, attachmentId } = ctx.body;
+ ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
+ ctx.assertUuid(attachmentId, "attachmentId is required");
+
+ const user = ctx.state.user;
+ authorize(user, "import", Collection);
+
+ const attachment = await Attachment.findByPk(attachmentId);
+ authorize(user, "read", attachment);
+
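+ // the endpoint only validates and records an event; the importer service
+ // listens for collections.import and performs the work asynchronously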
+ await Event.create({
+ name: "collections.import",
+ modelId: attachmentId,
+ teamId: user.teamId,
+ actorId: user.id,
+ data: { type },
+ ip: ctx.request.ip,
+ });
+
+ ctx.body = {
+ success: true,
+ };
+});
+
router.post("collections.add_group", auth(), async (ctx) => {
const { id, groupId, permission = "read_write" } = ctx.body;
ctx.assertUuid(id, "id is required");
diff --git a/server/api/collections.test.js b/server/api/collections.test.js
index 5feef64cc..ce4cd927c 100644
--- a/server/api/collections.test.js
+++ b/server/api/collections.test.js
@@ -9,6 +9,7 @@ import {
buildDocument,
} from "../test/factories";
import { flushdb, seed } from "../test/support";
+
const server = new TestServer(app.callback());
beforeEach(() => flushdb());
@@ -109,6 +110,26 @@ describe("#collections.list", () => {
});
});
+describe("#collections.import", () => {
+ it("should error if no attachmentId is passed", async () => {
+ const user = await buildUser();
+ const res = await server.post("/api/collections.import", {
+ body: {
+ token: user.getJwtToken(),
+ },
+ });
+ expect(res.status).toEqual(400);
+ });
+
+ it("should require authentication", async () => {
+ const res = await server.post("/api/collections.import");
+ const body = await res.json();
+
+ expect(res.status).toEqual(401);
+ expect(body).toMatchSnapshot();
+ });
+});
+
describe("#collections.export", () => {
it("should now allow export of private collection not a member", async () => {
const { user } = await seed();
diff --git a/server/api/documents.js b/server/api/documents.js
index 500848899..b8d5f3f66 100644
--- a/server/api/documents.js
+++ b/server/api/documents.js
@@ -2,6 +2,7 @@
import Router from "koa-router";
import Sequelize from "sequelize";
import { subtractDate } from "../../shared/utils/date";
+import documentCreator from "../commands/documentCreator";
import documentImporter from "../commands/documentImporter";
import documentMover from "../commands/documentMover";
import {
@@ -865,30 +866,6 @@ router.post("documents.unstar", auth(), async (ctx) => {
};
});
-router.post("documents.create", auth(), createDocumentFromContext);
-router.post("documents.import", auth(), async (ctx) => {
- if (!ctx.is("multipart/form-data")) {
- throw new InvalidRequestError("Request type must be multipart/form-data");
- }
-
- const file: any = Object.values(ctx.request.files)[0];
- ctx.assertPresent(file, "file is required");
-
- const user = ctx.state.user;
- authorize(user, "create", Document);
-
- const { text, title } = await documentImporter({
- user,
- file,
- ip: ctx.request.ip,
- });
-
- ctx.body.text = text;
- ctx.body.title = title;
-
- await createDocumentFromContext(ctx);
-});
-
router.post("documents.templatize", auth(), async (ctx) => {
const { id } = ctx.body;
ctx.assertPresent(id, "id is required");
@@ -1170,8 +1147,73 @@ router.post("documents.unpublish", auth(), async (ctx) => {
};
});
-// TODO: update to actual `ctx` type
-export async function createDocumentFromContext(ctx: any) {
+router.post("documents.import", auth(), async (ctx) => {
+ const { publish, collectionId, parentDocumentId, index } = ctx.body;
+
+ if (!ctx.is("multipart/form-data")) {
+ throw new InvalidRequestError("Request type must be multipart/form-data");
+ }
+
+ const file: any = Object.values(ctx.request.files)[0];
+ ctx.assertPresent(file, "file is required");
+
+ ctx.assertUuid(collectionId, "collectionId must be a uuid");
+ if (parentDocumentId) {
+ ctx.assertUuid(parentDocumentId, "parentDocumentId must be a uuid");
+ }
+
+ if (index) ctx.assertPositiveInteger(index, "index must be an integer (>=0)");
+
+ const user = ctx.state.user;
+ authorize(user, "create", Document);
+
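+ // load the collection with the requesting user's membership so the policy
+ // check below can account for private collections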
+ const collection = await Collection.scope({
+ method: ["withMembership", user.id],
+ }).findOne({
+ where: {
+ id: collectionId,
+ teamId: user.teamId,
+ },
+ });
+ authorize(user, "publish", collection);
+
+ let parentDocument;
+ if (parentDocumentId) {
+ parentDocument = await Document.findOne({
+ where: {
+ id: parentDocumentId,
+ collectionId: collection.id,
+ },
+ });
+ authorize(user, "read", parentDocument, { collection });
+ }
+
+ const { text, title } = await documentImporter({
+ user,
+ file,
+ ip: ctx.request.ip,
+ });
+
+ const document = await documentCreator({
+ source: "import",
+ title,
+ text,
+ publish,
+ collectionId,
+ parentDocumentId,
+ index,
+ user,
+ ip: ctx.request.ip,
+ });
+ document.collection = collection;
+
+ return (ctx.body = {
+ data: await presentDocument(document),
+ policies: presentPolicies(user, [document]),
+ });
+});
+
+router.post("documents.create", auth(), async (ctx) => {
const {
title = "",
text = "",
@@ -1221,56 +1263,25 @@ export async function createDocumentFromContext(ctx: any) {
authorize(user, "read", templateDocument);
}
- let document = await Document.create({
+ const document = await documentCreator({
+ title,
+ text,
+ publish,
+ collectionId,
parentDocumentId,
- editorVersion,
- collectionId: collection.id,
- teamId: user.teamId,
- userId: user.id,
- lastModifiedById: user.id,
- createdById: user.id,
+ templateDocument,
template,
- templateId: templateDocument ? templateDocument.id : undefined,
- title: templateDocument ? templateDocument.title : title,
- text: templateDocument ? templateDocument.text : text,
- });
-
- await Event.create({
- name: "documents.create",
- documentId: document.id,
- collectionId: document.collectionId,
- teamId: document.teamId,
- actorId: user.id,
- data: { title: document.title, templateId },
+ index,
+ user,
+ editorVersion,
ip: ctx.request.ip,
});
-
- if (publish) {
- await document.publish(user.id);
-
- await Event.create({
- name: "documents.publish",
- documentId: document.id,
- collectionId: document.collectionId,
- teamId: document.teamId,
- actorId: user.id,
- data: { title: document.title },
- ip: ctx.request.ip,
- });
- }
-
- // reload to get all of the data needed to present (user, collection etc)
- // we need to specify publishedAt to bypass default scope that only returns
- // published documents
- document = await Document.findOne({
- where: { id: document.id, publishedAt: document.publishedAt },
- });
document.collection = collection;
return (ctx.body = {
data: await presentDocument(document),
policies: presentPolicies(user, [document]),
});
-}
+});
export default router;
diff --git a/server/api/documents.test.js b/server/api/documents.test.js
index a8ac18f13..9b8dacf65 100644
--- a/server/api/documents.test.js
+++ b/server/api/documents.test.js
@@ -1629,6 +1629,14 @@ describe("#documents.import", () => {
});
expect(res.status).toEqual(400);
});
+
+ it("should require authentication", async () => {
+ const { document } = await seed();
+ const res = await server.post("/api/documents.import", {
+ body: { id: document.id },
+ });
+ expect(res.status).toEqual(401);
+ });
});
describe("#documents.create", () => {
@@ -1648,6 +1656,7 @@ describe("#documents.create", () => {
expect(res.status).toEqual(200);
expect(newDocument.parentDocumentId).toBe(null);
expect(newDocument.collectionId).toBe(collection.id);
+ expect(body.policies[0].abilities.update).toEqual(true);
});
it("should not allow very long titles", async () => {
@@ -1680,6 +1689,7 @@ describe("#documents.create", () => {
expect(res.status).toEqual(200);
expect(body.data.title).toBe("new document");
+ expect(body.policies[0].abilities.update).toEqual(true);
});
it("should error with invalid parentDocument", async () => {
@@ -1714,6 +1724,7 @@ describe("#documents.create", () => {
expect(res.status).toEqual(200);
expect(body.data.title).toBe("new document");
+ expect(body.policies[0].abilities.update).toEqual(true);
});
});
diff --git a/server/commands/attachmentCreator.js b/server/commands/attachmentCreator.js
new file mode 100644
index 000000000..906054cbb
--- /dev/null
+++ b/server/commands/attachmentCreator.js
@@ -0,0 +1,45 @@
+// @flow
+import uuid from "uuid";
+import { Attachment, Event, User } from "../models";
+import { uploadToS3FromBuffer } from "../utils/s3";
+
+export default async function attachmentCreator({
+ name,
+ type,
+ buffer,
+ user,
+ source,
+ ip,
+}: {
+ name: string,
+ type: string,
+ buffer: Buffer,
+ user: User,
+ source?: "import",
+ ip: string,
+}) {
+ const key = `uploads/${user.id}/${uuid.v4()}/${name}`;
+ const acl = process.env.AWS_S3_ACL || "private";
+ const url = await uploadToS3FromBuffer(buffer, type, key, acl);
+
+ const attachment = await Attachment.create({
+ key,
+ acl,
+ url,
+ size: buffer.length,
+ contentType: type,
+ teamId: user.teamId,
+ userId: user.id,
+ });
+
+ await Event.create({
+ name: "attachments.create",
+ data: { name, source },
+ modelId: attachment.id,
+ teamId: user.teamId,
+ actorId: user.id,
+ ip,
+ });
+
+ return attachment;
+}
diff --git a/server/commands/collectionImporter.js b/server/commands/collectionImporter.js
new file mode 100644
index 000000000..d073213bf
--- /dev/null
+++ b/server/commands/collectionImporter.js
@@ -0,0 +1,194 @@
+// @flow
+import fs from "fs";
+import os from "os";
+import path from "path";
+import debug from "debug";
+import File from "formidable/lib/file";
+import invariant from "invariant";
+import { values, keys } from "lodash";
+import uuid from "uuid";
+import { parseOutlineExport } from "../../shared/utils/zip";
+import { FileImportError } from "../errors";
+import { Attachment, Event, Document, Collection, User } from "../models";
+import attachmentCreator from "./attachmentCreator";
+import documentCreator from "./documentCreator";
+import documentImporter from "./documentImporter";
+
+const log = debug("commands");
+
+export default async function collectionImporter({
+ file,
+ type,
+ user,
+ ip,
+}: {
+ file: File,
+ user: User,
+ type: "outline",
+ ip: string,
+}) {
+ // load the zip structure into memory
+ const zipData = await fs.promises.readFile(file.path);
+
+ let items;
+ try {
+ items = await parseOutlineExport(zipData);
+ } catch (err) {
+ throw new FileImportError(err.message);
+ }
+
+ if (!items.filter((item) => item.type === "document").length) {
+ throw new FileImportError(
+ "Uploaded file does not contain importable documents"
+ );
+ }
+
+ // store progress and pointers
+ let collections: { [path: string]: Collection } = {};
+ let documents: { [path: string]: Document } = {};
+ let attachments: { [path: string]: Attachment } = {};
+
+ for (const item of items) {
+ if (item.type === "collection") {
+ // check if collection with name exists
+ let [collection, isCreated] = await Collection.findOrCreate({
+ where: {
+ teamId: user.teamId,
+ name: item.name,
+ },
+ defaults: {
+ createdById: user.id,
+ private: false,
+ },
+ });
+
+ // create a new collection when one with this name already exists; it's
+ // possible to then end up with a second "Name (Imported)", but that edge
+ // case isn't worth handling right now
+ if (!isCreated) {
+ const name = `${item.name} (Imported)`;
+ collection = await Collection.create({
+ teamId: user.teamId,
+ createdById: user.id,
+ name,
+ private: false,
+ });
+ await Event.create({
+ name: "collections.create",
+ collectionId: collection.id,
+ teamId: collection.teamId,
+ actorId: user.id,
+ data: { name },
+ ip,
+ });
+ }
+
+ collections[item.path] = collection;
+ continue;
+ }
+
+ if (item.type === "document") {
+ const collectionDir = item.dir.split("/")[0];
+ const collection = collections[collectionDir];
+ invariant(collection, `Collection must exist for document ${item.dir}`);
+
+ // we have a document
+ const content = await item.item.async("string");
+ const name = path.basename(item.name);
+ const tmpDir = os.tmpdir();
+ const tmpFilePath = `${tmpDir}/upload-${uuid.v4()}`;
+
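+ // documentImporter expects a file on disk, so round-trip the zip entry
+ // through a temporary file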
+ await fs.promises.writeFile(tmpFilePath, content);
+ const file = new File({
+ name,
+ type: "text/markdown",
+ path: tmpFilePath,
+ });
+
+ const { text, title } = await documentImporter({
+ file,
+ user,
+ ip,
+ });
+
+ await fs.promises.unlink(tmpFilePath);
+
+ // if this document is nested, find and reference its parent document
+ let parentDocumentId;
+ if (item.depth > 1) {
+ const parentDocument =
+ documents[`${item.dir}.md`] || documents[item.dir];
+ invariant(parentDocument, `Document must exist for parent ${item.dir}`);
+ parentDocumentId = parentDocument.id;
+ }
+
+ const document = await documentCreator({
+ source: "import",
+ title,
+ text,
+ publish: true,
+ collectionId: collection.id,
+ createdAt: item.metadata.createdAt
+ ? new Date(item.metadata.createdAt)
+ : item.item.date,
+ updatedAt: item.item.date,
+ parentDocumentId,
+ user,
+ ip,
+ });
+
+ documents[item.path] = document;
+ continue;
+ }
+
+ if (item.type === "attachment") {
+ const buffer = await item.item.async("nodebuffer");
+ const attachment = await attachmentCreator({
+ source: "import",
+ name: item.name,
+ type,
+ buffer,
+ user,
+ ip,
+ });
+ attachments[item.path] = attachment;
+ continue;
+ }
+
+ log(`Skipped importing ${item.path}`);
+ }
+
+ // All collections, documents, and attachments have been created – time to
+ // update the documents to point to newly uploaded attachments where possible
+ for (const attachmentPath of keys(attachments)) {
+ const attachment = attachments[attachmentPath];
+
+ for (const document of values(documents)) {
+ // strip the collection and subdirectory from the attachment path; upload
+ // folders in an Outline export are relative to the document itself
+ const normalizedAttachmentPath = attachmentPath.replace(
+ /(.*)uploads\//,
+ "uploads/"
+ );
+
+ document.text = document.text
+ .replace(attachmentPath, attachment.redirectUrl)
+ .replace(normalizedAttachmentPath, attachment.redirectUrl)
+ .replace(`/${normalizedAttachmentPath}`, attachment.redirectUrl);
+
+ // does nothing if the document text is unchanged
+ await document.save({ fields: ["text"] });
+ }
+ }
+
+ // reload collections to get document mapping
+ for (const collection of values(collections)) {
+ await collection.reload();
+ }
+
+ return {
+ documents: values(documents),
+ collections: values(collections),
+ attachments: values(attachments),
+ };
+}
diff --git a/server/commands/collectionImporter.test.js b/server/commands/collectionImporter.test.js
new file mode 100644
index 000000000..950d18f32
--- /dev/null
+++ b/server/commands/collectionImporter.test.js
@@ -0,0 +1,90 @@
+// @flow
+import path from "path";
+import File from "formidable/lib/file";
+import { Attachment, Document, Collection } from "../models";
+import { buildUser } from "../test/factories";
+import { flushdb } from "../test/support";
+import collectionImporter from "./collectionImporter";
+
+jest.mock("../utils/s3");
+
+beforeEach(() => flushdb());
+
+describe("collectionImporter", () => {
+ const ip = "127.0.0.1";
+
+ it("should import documents in outline format", async () => {
+ const user = await buildUser();
+ const name = "outline.zip";
+ const file = new File({
+ name,
+ type: "application/zip",
+ path: path.resolve(__dirname, "..", "test", "fixtures", name),
+ });
+
+ const response = await collectionImporter({
+ type: "outline",
+ user,
+ file,
+ ip,
+ });
+
+ expect(response.collections.length).toEqual(1);
+ expect(response.documents.length).toEqual(8);
+ expect(response.attachments.length).toEqual(6);
+
+ expect(await Collection.count()).toEqual(1);
+ expect(await Document.count()).toEqual(8);
+ expect(await Attachment.count()).toEqual(6);
+ });
+
+ it("should throw an error with corrupt zip", async () => {
+ const user = await buildUser();
+ const name = "corrupt.zip";
+ const file = new File({
+ name,
+ type: "application/zip",
+ path: path.resolve(__dirname, "..", "test", "fixtures", name),
+ });
+
+ let error;
+ try {
+ await collectionImporter({
+ type: "outline",
+ user,
+ file,
+ ip,
+ });
+ } catch (err) {
+ error = err;
+ }
+
+ expect(error && error.message).toBeTruthy();
+ });
+
+ it("should throw an error with empty zip", async () => {
+ const user = await buildUser();
+ const name = "empty.zip";
+ const file = new File({
+ name,
+ type: "application/zip",
+ path: path.resolve(__dirname, "..", "test", "fixtures", name),
+ });
+
+ let error;
+ try {
+ await collectionImporter({
+ type: "outline",
+ user,
+ file,
+ ip,
+ });
+ } catch (err) {
+ error = err;
+ }
+
+ expect(error && error.message).toBe(
+ "Uploaded file does not contain importable documents"
+ );
+ });
+});
diff --git a/server/commands/documentCreator.js b/server/commands/documentCreator.js
new file mode 100644
index 000000000..f7501a185
--- /dev/null
+++ b/server/commands/documentCreator.js
@@ -0,0 +1,82 @@
+// @flow
+import { Document, Event, User } from "../models";
+
+export default async function documentCreator({
+ title = "",
+ text = "",
+ publish,
+ collectionId,
+ parentDocumentId,
+ templateDocument,
+ createdAt, // allows override for import
+ updatedAt,
+ template,
+ index,
+ user,
+ editorVersion,
+ source,
+ ip,
+}: {
+ title: string,
+ text: string,
+ publish?: boolean,
+ collectionId: string,
+ parentDocumentId?: string,
+ templateDocument?: Document,
+ template?: boolean,
+ createdAt?: Date,
+ updatedAt?: Date,
+ index?: number,
+ user: User,
+ editorVersion?: string,
+ source?: "import",
+ ip: string,
+}): Promise<Document> {
+ const templateId = templateDocument ? templateDocument.id : undefined;
+ let document = await Document.create({
+ parentDocumentId,
+ editorVersion,
+ collectionId,
+ teamId: user.teamId,
+ userId: user.id,
+ createdAt,
+ updatedAt,
+ lastModifiedById: user.id,
+ createdById: user.id,
+ template,
+ templateId,
+ title: templateDocument ? templateDocument.title : title,
+ text: templateDocument ? templateDocument.text : text,
+ });
+
+ await Event.create({
+ name: "documents.create",
+ documentId: document.id,
+ collectionId: document.collectionId,
+ teamId: document.teamId,
+ actorId: user.id,
+ data: { source, title: document.title, templateId },
+ ip,
+ });
+
+ if (publish) {
+ await document.publish(user.id);
+
+ await Event.create({
+ name: "documents.publish",
+ documentId: document.id,
+ collectionId: document.collectionId,
+ teamId: document.teamId,
+ actorId: user.id,
+ data: { source, title: document.title },
+ ip,
+ });
+ }
+
+ // reload to get all of the data needed to present (user, collection etc)
+ // we need to specify publishedAt to bypass default scope that only returns
+ // published documents
+ return Document.findOne({
+ where: { id: document.id, publishedAt: document.publishedAt },
+ });
+}
diff --git a/server/commands/documentImporter.js b/server/commands/documentImporter.js
index a38dee6c8..cb8ff43da 100644
--- a/server/commands/documentImporter.js
+++ b/server/commands/documentImporter.js
@@ -7,13 +7,12 @@ import mammoth from "mammoth";
import quotedPrintable from "quoted-printable";
import TurndownService from "turndown";
import utf8 from "utf8";
-import uuid from "uuid";
import parseTitle from "../../shared/utils/parseTitle";
import { FileImportError, InvalidRequestError } from "../errors";
-import { Attachment, Event, User } from "../models";
+import { User } from "../models";
import dataURItoBuffer from "../utils/dataURItoBuffer";
import parseImages from "../utils/parseImages";
-import { uploadToS3FromBuffer } from "../utils/s3";
+import attachmentCreator from "./attachmentCreator";
// https://github.com/domchristie/turndown#options
const turndownService = new TurndownService({
@@ -170,26 +169,13 @@ export default async function documentImporter({
for (const uri of dataURIs) {
const name = "imported";
- const key = `uploads/${user.id}/${uuid.v4()}/${name}`;
- const acl = process.env.AWS_S3_ACL || "private";
const { buffer, type } = dataURItoBuffer(uri);
- const url = await uploadToS3FromBuffer(buffer, type, key, acl);
- const attachment = await Attachment.create({
- key,
- acl,
- url,
- size: buffer.length,
- contentType: type,
- teamId: user.teamId,
- userId: user.id,
- });
-
- await Event.create({
- name: "attachments.create",
- data: { name },
- teamId: user.teamId,
- userId: user.id,
+ const attachment = await attachmentCreator({
+ name,
+ type,
+ buffer,
+ user,
ip,
});
diff --git a/server/events.js b/server/events.js
index f60181c5c..a3d72d9e9 100644
--- a/server/events.js
+++ b/server/events.js
@@ -47,6 +47,10 @@ export type DocumentEvent =
teamId: string,
actorId: string,
ip: string,
+ data: {
+ title: string,
+ source?: "import",
+ },
}
| {
name: "documents.move",
@@ -97,6 +101,15 @@ export type RevisionEvent = {
teamId: string,
};
+export type CollectionImportEvent = {
+ name: "collections.import",
+ modelId: string,
+ teamId: string,
+ actorId: string,
+ data: { type: "outline" },
+ ip: string,
+};
+
export type CollectionEvent =
| {
name: | "collections.create" // eslint-disable-line
@@ -163,6 +176,7 @@ export type Event =
| UserEvent
| DocumentEvent
| CollectionEvent
+ | CollectionImportEvent
| IntegrationEvent
| GroupEvent
| RevisionEvent
diff --git a/server/models/Attachment.js b/server/models/Attachment.js
index e5792ece3..fdb3d779e 100644
--- a/server/models/Attachment.js
+++ b/server/models/Attachment.js
@@ -1,7 +1,7 @@
// @flow
import path from "path";
import { DataTypes, sequelize } from "../sequelize";
-import { deleteFromS3 } from "../utils/s3";
+import { deleteFromS3, getFileByKey } from "../utils/s3";
const Attachment = sequelize.define(
"attachment",
@@ -47,6 +47,9 @@ const Attachment = sequelize.define(
isPrivate: function () {
return this.acl === "private";
},
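+ // returns a promise resolving to the file contents, fetched from S3 by key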
+ buffer: function () {
+ return getFileByKey(this.key);
+ },
},
}
);
diff --git a/server/policies/attachment.js b/server/policies/attachment.js
index d7105402d..28fdb8fe5 100644
--- a/server/policies/attachment.js
+++ b/server/policies/attachment.js
@@ -6,7 +6,7 @@ const { allow } = policy;
allow(User, "create", Attachment);
-allow(User, "delete", Attachment, (actor, attachment) => {
+allow(User, ["read", "delete"], Attachment, (actor, attachment) => {
if (!attachment || attachment.teamId !== actor.teamId) return false;
if (actor.isAdmin) return true;
if (actor.id === attachment.userId) return true;
diff --git a/server/policies/collection.js b/server/policies/collection.js
index e72a94f98..5e3bdd8ef 100644
--- a/server/policies/collection.js
+++ b/server/policies/collection.js
@@ -9,6 +9,11 @@ const { allow } = policy;
allow(User, "create", Collection);
+allow(User, "import", Collection, (actor) => {
+ if (actor.isAdmin) return true;
+ throw new AdminRequiredError();
+});
+
allow(User, ["read", "export"], Collection, (user, collection) => {
if (!collection || user.teamId !== collection.teamId) return false;
diff --git a/server/services/importer.js b/server/services/importer.js
new file mode 100644
index 000000000..133e1b941
--- /dev/null
+++ b/server/services/importer.js
@@ -0,0 +1,42 @@
+// @flow
+import fs from "fs";
+import os from "os";
+import File from "formidable/lib/file";
+import collectionImporter from "../commands/collectionImporter";
+import type { Event } from "../events";
+import { Attachment, User } from "../models";
+
+export default class Importer {
+ async on(event: Event) {
+ switch (event.name) {
+ case "collections.import": {
+ const { type } = event.data;
+ const attachment = await Attachment.findByPk(event.modelId);
+ const user = await User.findByPk(event.actorId);
+
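+ // fetch the uploaded zip from storage and write it to a temporary file so
+ // collectionImporter can treat it like a direct upload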
+ const buffer = await attachment.buffer;
+ const tmpDir = os.tmpdir();
+ const tmpFilePath = `${tmpDir}/upload-${event.modelId}`;
+
+ await fs.promises.writeFile(tmpFilePath, buffer);
+ const file = new File({
+ name: attachment.name,
+ type: attachment.contentType,
+ path: tmpFilePath,
+ });
+
+ await collectionImporter({
+ file,
+ user,
+ type,
+ ip: event.ip,
+ });
+
+ await attachment.destroy();
+
+ return;
+ }
+ default:
+ }
+ }
+}
diff --git a/server/services/notifications.js b/server/services/notifications.js
index 9629ad40a..2cd9d8339 100644
--- a/server/services/notifications.js
+++ b/server/services/notifications.js
@@ -27,6 +27,9 @@ export default class Notifications {
}
async documentUpdated(event: DocumentEvent) {
+ // never send notifications when batch importing documents
+ if (event.data && event.data.source === "import") return;
+
const document = await Document.findByPk(event.documentId);
if (!document) return;
diff --git a/server/services/slack.js b/server/services/slack.js
index 3b552eb6c..fd29ec132 100644
--- a/server/services/slack.js
+++ b/server/services/slack.js
@@ -55,6 +55,9 @@ export default class Slack {
}
async documentUpdated(event: DocumentEvent) {
+ // never send notifications when batch importing documents
+ if (event.data && event.data.source === "import") return;
+
const document = await Document.findByPk(event.documentId);
if (!document) return;
diff --git a/server/test/fixtures/corrupt.zip b/server/test/fixtures/corrupt.zip
new file mode 100644
index 000000000..9056068e5
--- /dev/null
+++ b/server/test/fixtures/corrupt.zip
@@ -0,0 +1 @@
+CORRUPT
\ No newline at end of file
diff --git a/server/test/fixtures/empty.zip b/server/test/fixtures/empty.zip
new file mode 100644
index 000000000..92cef7a07
Binary files /dev/null and b/server/test/fixtures/empty.zip differ
diff --git a/server/test/fixtures/outline.zip b/server/test/fixtures/outline.zip
new file mode 100644
index 000000000..030598dcb
Binary files /dev/null and b/server/test/fixtures/outline.zip differ
diff --git a/server/utils/s3.js b/server/utils/s3.js
index 7cb294a01..ece56262d 100644
--- a/server/utils/s3.js
+++ b/server/utils/s3.js
@@ -46,7 +46,8 @@ export const makeCredential = () => {
export const makePolicy = (
credential: string,
longDate: string,
- acl: string
+ acl: string,
+ contentType: string = "image"
) => {
const tomorrow = addHours(new Date(), 24);
const policy = {
@@ -55,7 +56,7 @@ export const makePolicy = (
["starts-with", "$key", ""],
{ acl },
["content-length-range", 0, +process.env.AWS_S3_UPLOAD_MAX_SIZE],
- ["starts-with", "$Content-Type", "image"],
+ ["starts-with", "$Content-Type", contentType],
["starts-with", "$Cache-Control", ""],
{ "x-amz-algorithm": "AWS4-HMAC-SHA256" },
{ "x-amz-credential": credential },
@@ -177,7 +178,7 @@ export const getSignedImageUrl = async (key: string) => {
: s3.getSignedUrl("getObject", params);
};
-export const getImageByKey = async (key: string) => {
+export const getFileByKey = async (key: string) => {
const params = {
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
diff --git a/server/utils/zip.js b/server/utils/zip.js
index 0c6650959..4aaf13669 100644
--- a/server/utils/zip.js
+++ b/server/utils/zip.js
@@ -4,7 +4,7 @@ import * as Sentry from "@sentry/node";
import JSZip from "jszip";
import tmp from "tmp";
import { Attachment, Collection, Document } from "../models";
-import { getImageByKey } from "./s3";
+import { getFileByKey } from "./s3";
async function addToArchive(zip, documents) {
for (const doc of documents) {
@@ -20,7 +20,14 @@ async function addToArchive(zip, documents) {
text = text.replace(attachment.redirectUrl, encodeURI(attachment.key));
}
- zip.file(`${document.title || "Untitled"}.md`, text);
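+ // store document metadata in the zip entry comment so that an import can
+ // later recover it (see parseOutlineExport)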
+ zip.file(`${document.title || "Untitled"}.md`, text, {
+ date: document.updatedAt,
+ comment: JSON.stringify({
+ pinned: document.pinned,
+ createdAt: document.createdAt,
+ updatedAt: document.updatedAt,
+ }),
+ });
if (doc.children && doc.children.length) {
const folder = zip.folder(document.title);
@@ -31,7 +38,7 @@ async function addToArchive(zip, documents) {
async function addImageToArchive(zip, key) {
try {
- const img = await getImageByKey(key);
+ const img = await getFileByKey(key);
zip.file(key, img, { createFolders: true });
} catch (err) {
if (process.env.SENTRY_DSN) {
@@ -60,7 +67,8 @@ export async function archiveCollection(collection: Collection) {
const zip = new JSZip();
if (collection.documentStructure) {
- await addToArchive(zip, collection.documentStructure);
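+ // nest each collection's documents inside a folder named after the
+ // collection so that exports round-trip through parseOutlineExport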
+ const folder = zip.folder(collection.name);
+ await addToArchive(folder, collection.documentStructure);
}
return archiveToPath(zip);
diff --git a/shared/i18n/locales/en_US/translation.json b/shared/i18n/locales/en_US/translation.json
index b0f510bfc..cf7d909c1 100644
--- a/shared/i18n/locales/en_US/translation.json
+++ b/shared/i18n/locales/en_US/translation.json
@@ -109,7 +109,8 @@
"People": "People",
"Groups": "Groups",
"Share Links": "Share Links",
- "Export Data": "Export Data",
+ "Import": "Import",
+ "Export": "Export",
"Integrations": "Integrations",
"Installation": "Installation",
"Unstar": "Unstar",
@@ -132,7 +133,6 @@
"New document": "New document",
"Import document": "Import document",
"Edit": "Edit",
- "Export": "Export",
"Delete": "Delete",
"Collection members": "Collection members",
"Edit collection": "Edit collection",
@@ -306,6 +306,19 @@
"Use the {{ meta }}+K shortcut to search from anywhere in your knowledge base": "Use the {{ meta }}+K shortcut to search from anywhere in your knowledge base",
"No documents found for your search filters. <1>1>Create a new document?": "No documents found for your search filters. <1>1>Create a new document?",
"Clear filters": "Clear filters",
+ "Import started": "Import started",
+ "Export in progress…": "Export in progress…",
+ "It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.": "It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.",
+ "Your file has been uploaded and the import is currently being processed, you can safely leave this page while it completes.": "Your file has been uploaded and the import is currently being processed, you can safely leave this page while it completes.",
+ "Sorry, the file {{ fileName }} is missing valid collections or documents.": "Sorry, the file {{ fileName }} is missing valid collections or documents.",
+ "{{ fileName }} looks good, the following collections and their documents will be imported:": "{{ fileName }} looks good, the following collections and their documents will be imported:",
+ "Uploading": "Uploading",
+ "Confirm & Import": "Confirm & Import",
+ "Choose File": "Choose File",
+ "A full export might take some time, consider exporting a single document or collection if possible. We’ll put together a zip of all your documents in Markdown format and email it to {{ userEmail }}.": "A full export might take some time, consider exporting a single document or collection if possible. We’ll put together a zip of all your documents in Markdown format and email it to {{ userEmail }}.",
+ "Export Requested": "Export Requested",
+ "Requesting Export": "Requesting Export",
+ "Export Data": "Export Data",
"Everyone that has signed into Outline appears here. It’s possible that there are other users who have access through {team.signinMethods} but haven’t signed in yet.": "Everyone that has signed into Outline appears here. It’s possible that there are other users who have access through {team.signinMethods} but haven’t signed in yet.",
"Active": "Active",
"Admins": "Admins",
diff --git a/shared/utils/zip.js b/shared/utils/zip.js
new file mode 100644
index 000000000..d6464f3f2
--- /dev/null
+++ b/shared/utils/zip.js
@@ -0,0 +1,76 @@
+// @flow
+import path from "path";
+import JSZip, { ZipObject } from "jszip";
+
+export type Item = {|
+ path: string,
+ dir: string,
+ name: string,
+ depth: number,
+ metadata: Object,
+ type: "collection" | "document" | "attachment",
+ item: ZipObject,
+|};
+
+export async function parseOutlineExport(
+ input: File | Buffer
+): Promise<Item[]> {
+ const zip = await JSZip.loadAsync(input);
+
+ // collect the entries into an array first so that callers can iterate over
+ // them with async / await
+ let items: Item[] = [];
+ zip.forEach(function (rawPath, item) {
+ const itemPath = rawPath.replace(/\/$/, "");
+ const dir = path.dirname(itemPath);
+ const name = path.basename(item.name);
+ const depth = itemPath.split("/").length - 1;
+
+ // known skippable items
+ if (itemPath.startsWith("__MACOSX") || itemPath.endsWith(".DS_Store")) {
+ return;
+ }
+
+ // attempt to parse extra metadata from zip comment
+ let metadata = {};
+ try {
+ metadata = item.comment ? JSON.parse(item.comment) : {};
+ } catch (err) {
+ console.log(
+ `ZIP comment found for ${item.name}, but could not be parsed as metadata: ${item.comment}`
+ );
+ }
+
+ if (depth === 0 && !item.dir) {
+ throw new Error(
+ "Root of zip file must only contain folders representing collections"
+ );
+ }
+
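+ // classify each entry by its position in the tree: top-level folders are
+ // collections, markdown files are documents, and anything under an
+ // uploads folder is an attachment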
+ let type;
+ if (depth === 0 && item.dir && name) {
+ type = "collection";
+ }
+ if (depth > 0 && !item.dir && item.name.endsWith(".md")) {
+ type = "document";
+ }
+ if (depth > 0 && !item.dir && itemPath.includes("uploads")) {
+ type = "attachment";
+ }
+
+ if (!type) {
+ return;
+ }
+
+ items.push({
+ path: itemPath,
+ dir,
+ name,
+ depth,
+ type,
+ metadata,
+ item,
+ });
+ });
+
+ return items;
+}