feat: Batch Import (#1747)

closes #1846
closes #914
This commit is contained in:
Tom Moor
2021-02-20 12:36:05 -08:00
committed by GitHub
30 changed files with 985 additions and 109 deletions

View File

@@ -112,9 +112,9 @@ function SettingsSidebar() {
/>
{can.export && (
<SidebarLink
to="/settings/export"
to="/settings/import-export"
icon={<DocumentIcon color="currentColor" />}
label={t("Export Data")}
label={`${t("Import")} / ${t("Export")}`}
/>
)}
</Section>

View File

@@ -3,8 +3,8 @@ import * as React from "react";
import { Switch } from "react-router-dom";
import Settings from "scenes/Settings";
import Details from "scenes/Settings/Details";
import Export from "scenes/Settings/Export";
import Groups from "scenes/Settings/Groups";
import ImportExport from "scenes/Settings/ImportExport";
import Notifications from "scenes/Settings/Notifications";
import People from "scenes/Settings/People";
import Security from "scenes/Settings/Security";
@@ -28,7 +28,7 @@ export default function SettingsRoutes() {
<Route exact path="/settings/notifications" component={Notifications} />
<Route exact path="/settings/integrations/slack" component={Slack} />
<Route exact path="/settings/integrations/zapier" component={Zapier} />
<Route exact path="/settings/export" component={Export} />
<Route exact path="/settings/import-export" component={ImportExport} />
</Switch>
);
}

View File

@@ -0,0 +1,223 @@
// @flow
import invariant from "invariant";
import { observer } from "mobx-react";
import { CollectionIcon } from "outline-icons";
import * as React from "react";
import { useTranslation, Trans } from "react-i18next";
import { VisuallyHidden } from "reakit/VisuallyHidden";
import styled from "styled-components";
import { parseOutlineExport } from "shared/utils/zip";
import Button from "components/Button";
import CenteredContent from "components/CenteredContent";
import HelpText from "components/HelpText";
import Notice from "components/Notice";
import PageTitle from "components/PageTitle";
import useCurrentUser from "hooks/useCurrentUser";
import useStores from "hooks/useStores";
import getDataTransferFiles from "utils/getDataTransferFiles";
import { uploadFile } from "utils/uploadFile";
// Settings screen that lets an admin import a previously exported Outline zip
// and request a full Markdown export of the workspace (emailed to the user).
function ImportExport() {
  const { t } = useTranslation();
  const user = useCurrentUser();
  // hidden <input type="file"> used to open the native file picker
  const fileRef = React.useRef();
  const { ui, collections } = useStores();
  const { showToast } = ui;
  // export request in-flight
  const [isLoading, setLoading] = React.useState(false);
  // import upload in-flight
  const [isImporting, setImporting] = React.useState(false);
  // import successfully queued server-side
  const [isImported, setImported] = React.useState(false);
  // export successfully requested
  const [isExporting, setExporting] = React.useState(false);
  // the zip File chosen by the user, if any
  const [file, setFile] = React.useState();
  // client-side parse of the zip contents, used for the preview below
  const [importDetails, setImportDetails] = React.useState();

  // Upload the chosen zip as an attachment, then ask the server to import it.
  // All transient state is reset in `finally` whether or not the upload worked.
  const handleImport = React.useCallback(
    async (ev) => {
      // NOTE(review): isImported is otherwise a boolean; setImported(undefined)
      // renders falsy so this works, but setImported(false) would be
      // consistent — confirm intent.
      setImported(undefined);
      setImporting(true);

      try {
        invariant(file, "File must exist to upload");
        const attachment = await uploadFile(file, {
          name: file.name,
        });
        await collections.import(attachment.id);
        showToast(t("Import started"));
        setImported(true);
      } catch (err) {
        showToast(err.message);
      } finally {
        // clear the native input so picking the same file again re-fires onChange
        if (fileRef.current) {
          fileRef.current.value = "";
        }
        setImporting(false);
        setFile(undefined);
        setImportDetails(undefined);
      }
    },
    [t, file, collections, showToast]
  );

  // Parse the picked zip locally so we can preview which collections and
  // documents it contains before uploading anything.
  const handleFilePicked = React.useCallback(async (ev) => {
    ev.preventDefault();

    const files = getDataTransferFiles(ev);
    const file = files[0];
    setFile(file);

    try {
      setImportDetails(await parseOutlineExport(file));
    } catch (err) {
      // an unreadable zip simply shows as "missing valid collections or documents"
      setImportDetails([]);
    }
  }, []);

  // Proxy a button click through to the hidden file input.
  const handlePickFile = React.useCallback(
    (ev) => {
      ev.preventDefault();

      if (fileRef.current) {
        fileRef.current.click();
      }
    },
    [fileRef]
  );

  // Kick off a full workspace export; the result is emailed, so we only track
  // that the request was accepted.
  const handleExport = React.useCallback(
    async (ev: SyntheticEvent<>) => {
      ev.preventDefault();
      setLoading(true);

      try {
        await collections.export();
        setExporting(true);
        showToast(t("Export in progress…"));
      } finally {
        setLoading(false);
      }
    },
    [t, collections, showToast]
  );

  const hasCollections = importDetails
    ? !!importDetails.filter((detail) => detail.type === "collection").length
    : false;
  const hasDocuments = importDetails
    ? !!importDetails.filter((detail) => detail.type === "document").length
    : false;
  // a zip is only importable when it contains both collections and documents
  const isImportable = hasCollections && hasDocuments;

  return (
    <CenteredContent>
      <PageTitle title={`${t("Import")} / ${t("Export")}`} />
      <h1>{t("Import")}</h1>
      <HelpText>
        <Trans>
          It is possible to import a zip file of folders and Markdown files
          previously exported from an Outline instance. Support will soon be
          added for importing from other services.
        </Trans>
      </HelpText>
      <VisuallyHidden>
        <input
          type="file"
          ref={fileRef}
          onChange={handleFilePicked}
          accept="application/zip"
        />
      </VisuallyHidden>
      {isImported && (
        <Notice>
          <Trans>
            Your file has been uploaded and the import is currently being
            processed, you can safely leave this page while it completes.
          </Trans>
        </Notice>
      )}
      {file && !isImportable && (
        <ImportPreview>
          <Trans
            defaults="Sorry, the file <em>{{ fileName }}</em> is missing valid collections or documents."
            values={{ fileName: file.name }}
            components={{ em: <strong /> }}
          />
        </ImportPreview>
      )}
      {file && importDetails && isImportable ? (
        <>
          <ImportPreview as="div">
            <Trans
              defaults="<em>{{ fileName }}</em> looks good, the following collections and their documents will be imported:"
              values={{ fileName: file.name }}
              components={{ em: <strong /> }}
            />
            <List>
              {importDetails
                .filter((detail) => detail.type === "collection")
                .map((detail) => (
                  <ImportPreviewItem key={detail.path}>
                    <CollectionIcon />
                    <CollectionName>{detail.name}</CollectionName>
                  </ImportPreviewItem>
                ))}
            </List>
          </ImportPreview>
          <Button
            type="submit"
            onClick={handleImport}
            disabled={isImporting}
            primary
          >
            {isImporting ? `${t("Uploading")}` : t("Confirm & Import")}
          </Button>
        </>
      ) : (
        <Button type="submit" onClick={handlePickFile} primary>
          {t("Choose File")}
        </Button>
      )}
      <h1>{t("Export")}</h1>
      {/* NOTE(review): "Well put together" is missing an apostrophe (should
          be "We'll"); fixing it also changes the i18n default key, so it must
          be corrected together with the locale files. */}
      <HelpText>
        <Trans
          defaults="A full export might take some time, consider exporting a single document or collection if possible. Well put together a zip of all your documents in Markdown format and email it to <em>{{ userEmail }}</em>."
          values={{ userEmail: user.email }}
          components={{ em: <strong /> }}
        />
      </HelpText>
      <Button
        type="submit"
        onClick={handleExport}
        disabled={isLoading || isExporting}
        primary
      >
        {isExporting
          ? t("Export Requested")
          : isLoading
          ? `${t("Requesting Export")}`
          : t("Export Data")}
      </Button>
    </CenteredContent>
  );
}
// Bare list (no bullets/padding) used for the import preview contents.
const List = styled.ul`
  padding: 0;
  margin: 8px 0 0;
`;

// Notice variant that previews the contents of the chosen import file.
const ImportPreview = styled(Notice)`
  margin-bottom: 16px;
`;

// Single collection row inside the import preview.
const ImportPreviewItem = styled.li`
  display: flex;
  align-items: center;
  list-style: none;
`;

const CollectionName = styled.span`
  font-weight: 500;
  margin-left: 4px;
`;

export default observer(ImportExport);

View File

@@ -1,7 +1,6 @@
// @flow
import { concat, filter, last } from "lodash";
import { computed, action } from "mobx";
import naturalSort from "shared/utils/naturalSort";
import Collection from "models/Collection";
import BaseStore from "./BaseStore";
@@ -89,6 +88,13 @@ export default class CollectionsStore extends BaseStore<Collection> {
}
@action
import = async (attachmentId: string) => {
await client.post("/collections.import", {
type: "outline",
attachmentId,
});
};
async update(params: Object): Promise<Collection> {
const result = await super.update(params);
@@ -116,12 +122,12 @@ export default class CollectionsStore extends BaseStore<Collection> {
if (path) return path.title;
}
delete(collection: Collection) {
super.delete(collection);
delete = async (collection: Collection) => {
await super.delete(collection);
this.rootStore.documents.fetchRecentlyUpdated();
this.rootStore.documents.fetchRecentlyViewed();
}
};
export = () => {
return client.post("/collections.export_all");

View File

@@ -39,11 +39,13 @@ export const uploadFile = async (
formData.append("file", file);
}
await fetch(data.uploadUrl, {
const uploadResponse = await fetch(data.uploadUrl, {
method: "post",
body: formData,
});
invariant(uploadResponse.ok, "Upload failed, try again?");
return attachment;
};

View File

@@ -61,6 +61,15 @@ Object {
}
`;
exports[`#collections.import should require authentication 1`] = `
Object {
"error": "authentication_required",
"message": "Authentication required",
"ok": false,
"status": 401,
}
`;
exports[`#collections.info should require authentication 1`] = `
Object {
"error": "authentication_required",

View File

@@ -38,7 +38,7 @@ router.post("attachments.create", auth(), async (ctx) => {
const key = `${bucket}/${user.id}/${s3Key}/${name}`;
const credential = makeCredential();
const longDate = format(new Date(), "YYYYMMDDTHHmmss\\Z");
const policy = makePolicy(credential, longDate, acl);
const policy = makePolicy(credential, longDate, acl, contentType);
const endpoint = publicS3Endpoint();
const url = `${endpoint}/${key}`;
@@ -85,6 +85,7 @@ router.post("attachments.create", auth(), async (ctx) => {
documentId,
contentType,
name,
id: attachment.id,
url: attachment.redirectUrl,
size,
},

View File

@@ -12,6 +12,7 @@ import {
Event,
User,
Group,
Attachment,
} from "../models";
import policy from "../policies";
import {
@@ -98,6 +99,31 @@ router.post("collections.info", auth(), async (ctx) => {
};
});
// Queue an import of a previously uploaded export zip. The actual work happens
// asynchronously: this endpoint only validates the request and records a
// "collections.import" event, which the importer service consumes.
router.post("collections.import", auth(), async (ctx) => {
  const { type, attachmentId } = ctx.body;
  ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
  ctx.assertUuid(attachmentId, "attachmentId is required");

  const user = ctx.state.user;
  // importing is restricted by policy (admin-only, see collection policy)
  authorize(user, "import", Collection);

  const attachment = await Attachment.findByPk(attachmentId);
  // throws when the attachment is missing or not readable by this user's team
  authorize(user, "read", attachment);

  await Event.create({
    name: "collections.import",
    modelId: attachmentId,
    teamId: user.teamId,
    actorId: user.id,
    data: { type },
    ip: ctx.request.ip,
  });

  ctx.body = {
    success: true,
  };
});
router.post("collections.add_group", auth(), async (ctx) => {
const { id, groupId, permission = "read_write" } = ctx.body;
ctx.assertUuid(id, "id is required");

View File

@@ -9,6 +9,7 @@ import {
buildDocument,
} from "../test/factories";
import { flushdb, seed } from "../test/support";
const server = new TestServer(app.callback());
beforeEach(() => flushdb());
@@ -109,6 +110,26 @@ describe("#collections.list", () => {
});
});
describe("#collections.import", () => {
it("should error if no attachmentId is passed", async () => {
const user = await buildUser();
const res = await server.post("/api/collections.import", {
body: {
token: user.getJwtToken(),
},
});
expect(res.status).toEqual(400);
});
it("should require authentication", async () => {
const res = await server.post("/api/collections.import");
const body = await res.json();
expect(res.status).toEqual(401);
expect(body).toMatchSnapshot();
});
});
describe("#collections.export", () => {
it("should now allow export of private collection not a member", async () => {
const { user } = await seed();

View File

@@ -2,6 +2,7 @@
import Router from "koa-router";
import Sequelize from "sequelize";
import { subtractDate } from "../../shared/utils/date";
import documentCreator from "../commands/documentCreator";
import documentImporter from "../commands/documentImporter";
import documentMover from "../commands/documentMover";
import {
@@ -865,30 +866,6 @@ router.post("documents.unstar", auth(), async (ctx) => {
};
});
router.post("documents.create", auth(), createDocumentFromContext);
router.post("documents.import", auth(), async (ctx) => {
if (!ctx.is("multipart/form-data")) {
throw new InvalidRequestError("Request type must be multipart/form-data");
}
const file: any = Object.values(ctx.request.files)[0];
ctx.assertPresent(file, "file is required");
const user = ctx.state.user;
authorize(user, "create", Document);
const { text, title } = await documentImporter({
user,
file,
ip: ctx.request.ip,
});
ctx.body.text = text;
ctx.body.title = title;
await createDocumentFromContext(ctx);
});
router.post("documents.templatize", auth(), async (ctx) => {
const { id } = ctx.body;
ctx.assertPresent(id, "id is required");
@@ -1170,8 +1147,73 @@ router.post("documents.unpublish", auth(), async (ctx) => {
};
});
// TODO: update to actual `ctx` type
export async function createDocumentFromContext(ctx: any) {
// Import a single uploaded file (e.g. Markdown, Word) as a new document in a
// collection, optionally nested under a parent document.
router.post("documents.import", auth(), async (ctx) => {
  const { publish, collectionId, parentDocumentId, index } = ctx.body;

  // the file arrives as a multipart upload rather than JSON
  if (!ctx.is("multipart/form-data")) {
    throw new InvalidRequestError("Request type must be multipart/form-data");
  }

  const file: any = Object.values(ctx.request.files)[0];
  ctx.assertPresent(file, "file is required");
  ctx.assertUuid(collectionId, "collectionId must be an uuid");
  if (parentDocumentId) {
    ctx.assertUuid(parentDocumentId, "parentDocumentId must be an uuid");
  }

  // NOTE(review): a falsy check means index === 0 skips validation even though
  // the message says ">=0" — confirm whether 0 is an intended valid index
  if (index) ctx.assertPositiveInteger(index, "index must be an integer (>=0)");

  const user = ctx.state.user;
  authorize(user, "create", Document);

  // scope with membership so the policy check below can see the user's access
  const collection = await Collection.scope({
    method: ["withMembership", user.id],
  }).findOne({
    where: {
      id: collectionId,
      teamId: user.teamId,
    },
  });
  authorize(user, "publish", collection);

  let parentDocument;
  if (parentDocumentId) {
    // the parent must live in the same collection the import targets
    parentDocument = await Document.findOne({
      where: {
        id: parentDocumentId,
        collectionId: collection.id,
      },
    });
    authorize(user, "read", parentDocument, { collection });
  }

  // convert the uploaded file into markdown text and a title
  const { text, title } = await documentImporter({
    user,
    file,
    ip: ctx.request.ip,
  });

  const document = await documentCreator({
    source: "import",
    title,
    text,
    publish,
    collectionId,
    parentDocumentId,
    index,
    user,
    ip: ctx.request.ip,
  });

  // attach the already-loaded collection so presentation avoids a refetch
  document.collection = collection;

  return (ctx.body = {
    data: await presentDocument(document),
    policies: presentPolicies(user, [document]),
  });
});
router.post("documents.create", auth(), async (ctx) => {
const {
title = "",
text = "",
@@ -1221,56 +1263,25 @@ export async function createDocumentFromContext(ctx: any) {
authorize(user, "read", templateDocument);
}
let document = await Document.create({
const document = await documentCreator({
title,
text,
publish,
collectionId,
parentDocumentId,
editorVersion,
collectionId: collection.id,
teamId: user.teamId,
userId: user.id,
lastModifiedById: user.id,
createdById: user.id,
templateDocument,
template,
templateId: templateDocument ? templateDocument.id : undefined,
title: templateDocument ? templateDocument.title : title,
text: templateDocument ? templateDocument.text : text,
});
await Event.create({
name: "documents.create",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: user.id,
data: { title: document.title, templateId },
index,
user,
editorVersion,
ip: ctx.request.ip,
});
if (publish) {
await document.publish(user.id);
await Event.create({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: user.id,
data: { title: document.title },
ip: ctx.request.ip,
});
}
// reload to get all of the data needed to present (user, collection etc)
// we need to specify publishedAt to bypass default scope that only returns
// published documents
document = await Document.findOne({
where: { id: document.id, publishedAt: document.publishedAt },
});
document.collection = collection;
return (ctx.body = {
data: await presentDocument(document),
policies: presentPolicies(user, [document]),
});
}
});
export default router;

View File

@@ -1629,6 +1629,14 @@ describe("#documents.import", () => {
});
expect(res.status).toEqual(400);
});
it("should require authentication", async () => {
const { document } = await seed();
const res = await server.post("/api/documents.import", {
body: { id: document.id },
});
expect(res.status).toEqual(401);
});
});
describe("#documents.create", () => {
@@ -1648,6 +1656,7 @@ describe("#documents.create", () => {
expect(res.status).toEqual(200);
expect(newDocument.parentDocumentId).toBe(null);
expect(newDocument.collectionId).toBe(collection.id);
expect(body.policies[0].abilities.update).toEqual(true);
});
it("should not allow very long titles", async () => {
@@ -1680,6 +1689,7 @@ describe("#documents.create", () => {
expect(res.status).toEqual(200);
expect(body.data.title).toBe("new document");
expect(body.policies[0].abilities.update).toEqual(true);
});
it("should error with invalid parentDocument", async () => {
@@ -1714,6 +1724,7 @@ describe("#documents.create", () => {
expect(res.status).toEqual(200);
expect(body.data.title).toBe("new document");
expect(body.policies[0].abilities.update).toEqual(true);
});
});

View File

@@ -0,0 +1,45 @@
// @flow
import uuid from "uuid";
import { Attachment, Event, User } from "../models";
import { uploadToS3FromBuffer } from "../utils/s3";
export default async function attachmentCreator({
name,
type,
buffer,
user,
source,
ip,
}: {
name: string,
type: string,
buffer: Buffer,
user: User,
source?: "import",
ip: string,
}) {
const key = `uploads/${user.id}/${uuid.v4()}/${name}`;
const acl = process.env.AWS_S3_ACL || "private";
const url = await uploadToS3FromBuffer(buffer, type, key, acl);
const attachment = await Attachment.create({
key,
acl,
url,
size: buffer.length,
contentType: type,
teamId: user.teamId,
userId: user.id,
});
await Event.create({
name: "attachments.create",
data: { name, source },
modelId: attachment.id,
teamId: user.teamId,
actorId: user.id,
ip,
});
return attachment;
}

View File

@@ -0,0 +1,194 @@
// @flow
import fs from "fs";
import os from "os";
import path from "path";
import debug from "debug";
import File from "formidable/lib/file";
import invariant from "invariant";
import { values, keys } from "lodash";
import uuid from "uuid";
import { parseOutlineExport } from "../../shared/utils/zip";
import { FileImportError } from "../errors";
import { Attachment, Event, Document, Collection, User } from "../models";
import attachmentCreator from "./attachmentCreator";
import documentCreator from "./documentCreator";
import documentImporter from "./documentImporter";
const log = debug("commands");
export default async function collectionImporter({
file,
type,
user,
ip,
}: {
file: File,
user: User,
type: "outline",
ip: string,
}) {
// load the zip structure into memory
const zipData = await fs.promises.readFile(file.path);
let items;
try {
items = await await parseOutlineExport(zipData);
} catch (err) {
throw new FileImportError(err.message);
}
if (!items.filter((item) => item.type === "document").length) {
throw new FileImportError(
"Uploaded file does not contain importable documents"
);
}
// store progress and pointers
let collections: { string: Collection } = {};
let documents: { string: Document } = {};
let attachments: { string: Attachment } = {};
for (const item of items) {
if (item.type === "collection") {
// check if collection with name exists
let [collection, isCreated] = await Collection.findOrCreate({
where: {
teamId: user.teamId,
name: item.name,
},
defaults: {
createdById: user.id,
private: false,
},
});
// create new collection if name already exists, yes it's possible that
// there is also a "Name (Imported)" but this is a case not worth dealing
// with right now
if (!isCreated) {
const name = `${item.name} (Imported)`;
collection = await Collection.create({
teamId: user.teamId,
createdById: user.id,
name,
private: false,
});
await Event.create({
name: "collections.create",
collectionId: collection.id,
teamId: collection.teamId,
actorId: user.id,
data: { name },
ip,
});
}
collections[item.path] = collection;
continue;
}
if (item.type === "document") {
const collectionDir = item.dir.split("/")[0];
const collection = collections[collectionDir];
invariant(collection, `Collection must exist for document ${item.dir}`);
// we have a document
const content = await item.item.async("string");
const name = path.basename(item.name);
const tmpDir = os.tmpdir();
const tmpFilePath = `${tmpDir}/upload-${uuid.v4()}`;
await fs.promises.writeFile(tmpFilePath, content);
const file = new File({
name,
type: "text/markdown",
path: tmpFilePath,
});
const { text, title } = await documentImporter({
file,
user,
ip,
});
await fs.promises.unlink(tmpFilePath);
// must be a nested document, find and reference the parent document
let parentDocumentId;
if (item.depth > 1) {
const parentDocument =
documents[`${item.dir}.md`] || documents[item.dir];
invariant(parentDocument, `Document must exist for parent ${item.dir}`);
parentDocumentId = parentDocument.id;
}
const document = await documentCreator({
source: "import",
title,
text,
publish: true,
collectionId: collection.id,
createdAt: item.metadata.createdAt
? new Date(item.metadata.createdAt)
: item.date,
updatedAt: item.date,
parentDocumentId,
user,
ip,
});
documents[item.path] = document;
continue;
}
if (item.type === "attachment") {
const buffer = await item.item.async("nodebuffer");
const attachment = await attachmentCreator({
source: "import",
name: item.name,
type,
buffer,
user,
ip,
});
attachments[item.path] = attachment;
continue;
}
log(`Skipped importing ${item.path}`);
}
// All collections, documents, and attachments have been created time to
// update the documents to point to newly uploaded attachments where possible
for (const attachmentPath of keys(attachments)) {
const attachment = attachments[attachmentPath];
for (const document of values(documents)) {
// pull the collection and subdirectory out of the path name, upload folders
// in an Outline export are relative to the document itself
const normalizedAttachmentPath = attachmentPath.replace(
/(.*)uploads\//,
"uploads/"
);
document.text = document.text
.replace(attachmentPath, attachment.redirectUrl)
.replace(normalizedAttachmentPath, attachment.redirectUrl)
.replace(`/${normalizedAttachmentPath}`, attachment.redirectUrl);
// does nothing if the document text is unchanged
await document.save({ fields: ["text"] });
}
}
// reload collections to get document mapping
for (const collection of values(collections)) {
await collection.reload();
}
return {
documents: values(documents),
collections: values(collections),
attachments: values(attachments),
};
}

View File

@@ -0,0 +1,90 @@
// @flow
import path from "path";
import File from "formidable/lib/file";
import { Attachment, Document, Collection } from "../models";
import { buildUser } from "../test/factories";
import { flushdb } from "../test/support";
import collectionImporter from "./collectionImporter";
jest.mock("../utils/s3");
beforeEach(() => flushdb());
describe("collectionImporter", () => {
  const ip = "127.0.0.1";

  // Build a formidable File pointing at one of the zip fixtures on disk.
  const zipFixture = (name) =>
    new File({
      name,
      type: "application/zip",
      path: path.resolve(__dirname, "..", "test", "fixtures", name),
    });

  it("should import documents in outline format", async () => {
    const user = await buildUser();
    const response = await collectionImporter({
      type: "outline",
      user,
      file: zipFixture("outline.zip"),
      ip,
    });

    // everything in the fixture should be returned…
    expect(response.collections.length).toEqual(1);
    expect(response.documents.length).toEqual(8);
    expect(response.attachments.length).toEqual(6);

    // …and persisted to the database
    expect(await Collection.count()).toEqual(1);
    expect(await Document.count()).toEqual(8);
    expect(await Attachment.count()).toEqual(6);
  });

  it("should throw an error with corrupt zip", async () => {
    const user = await buildUser();

    let error;
    try {
      await collectionImporter({
        type: "outline",
        user,
        file: zipFixture("corrupt.zip"),
        ip,
      });
    } catch (err) {
      error = err;
    }

    // any parse failure is acceptable, just not silence
    expect(error && error.message).toBeTruthy();
  });

  it("should throw an error with empty zip", async () => {
    const user = await buildUser();

    let error;
    try {
      await collectionImporter({
        type: "outline",
        user,
        file: zipFixture("empty.zip"),
        ip,
      });
    } catch (err) {
      error = err;
    }

    expect(error && error.message).toBe(
      "Uploaded file does not contain importable documents"
    );
  });
});

View File

@@ -0,0 +1,82 @@
// @flow
import { Document, Event, User } from "../models";
export default async function documentCreator({
title = "",
text = "",
publish,
collectionId,
parentDocumentId,
templateDocument,
createdAt, // allows override for import
updatedAt,
template,
index,
user,
editorVersion,
source,
ip,
}: {
title: string,
text: string,
publish?: boolean,
collectionId: string,
parentDocumentId?: string,
templateDocument?: Document,
template?: boolean,
createdAt?: Date,
updatedAt?: Date,
index?: number,
user: User,
editorVersion?: string,
source?: "import",
ip: string,
}): Document {
const templateId = templateDocument ? templateDocument.id : undefined;
let document = await Document.create({
parentDocumentId,
editorVersion,
collectionId,
teamId: user.teamId,
userId: user.id,
createdAt,
updatedAt,
lastModifiedById: user.id,
createdById: user.id,
template,
templateId,
title: templateDocument ? templateDocument.title : title,
text: templateDocument ? templateDocument.text : text,
});
await Event.create({
name: "documents.create",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: user.id,
data: { source, title: document.title, templateId },
ip,
});
if (publish) {
await document.publish(user.id);
await Event.create({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: user.id,
data: { source, title: document.title },
ip,
});
}
// reload to get all of the data needed to present (user, collection etc)
// we need to specify publishedAt to bypass default scope that only returns
// published documents
return Document.findOne({
where: { id: document.id, publishedAt: document.publishedAt },
});
}

View File

@@ -7,13 +7,12 @@ import mammoth from "mammoth";
import quotedPrintable from "quoted-printable";
import TurndownService from "turndown";
import utf8 from "utf8";
import uuid from "uuid";
import parseTitle from "../../shared/utils/parseTitle";
import { FileImportError, InvalidRequestError } from "../errors";
import { Attachment, Event, User } from "../models";
import { User } from "../models";
import dataURItoBuffer from "../utils/dataURItoBuffer";
import parseImages from "../utils/parseImages";
import { uploadToS3FromBuffer } from "../utils/s3";
import attachmentCreator from "./attachmentCreator";
// https://github.com/domchristie/turndown#options
const turndownService = new TurndownService({
@@ -170,26 +169,13 @@ export default async function documentImporter({
for (const uri of dataURIs) {
const name = "imported";
const key = `uploads/${user.id}/${uuid.v4()}/${name}`;
const acl = process.env.AWS_S3_ACL || "private";
const { buffer, type } = dataURItoBuffer(uri);
const url = await uploadToS3FromBuffer(buffer, type, key, acl);
const attachment = await Attachment.create({
key,
acl,
url,
size: buffer.length,
contentType: type,
teamId: user.teamId,
userId: user.id,
});
await Event.create({
name: "attachments.create",
data: { name },
teamId: user.teamId,
userId: user.id,
const attachment = await attachmentCreator({
name,
type,
buffer,
user,
ip,
});

View File

@@ -47,6 +47,10 @@ export type DocumentEvent =
teamId: string,
actorId: string,
ip: string,
data: {
title: string,
source?: "import",
},
}
| {
name: "documents.move",
@@ -97,6 +101,15 @@ export type RevisionEvent = {
teamId: string,
};
export type CollectionImportEvent = {
name: "collections.import",
modelId: string,
teamId: string,
actorId: string,
data: { type: "outline" },
ip: string,
};
export type CollectionEvent =
| {
name: | "collections.create" // eslint-disable-line
@@ -163,6 +176,7 @@ export type Event =
| UserEvent
| DocumentEvent
| CollectionEvent
| CollectionImportEvent
| IntegrationEvent
| GroupEvent
| RevisionEvent

View File

@@ -1,7 +1,7 @@
// @flow
import path from "path";
import { DataTypes, sequelize } from "../sequelize";
import { deleteFromS3 } from "../utils/s3";
import { deleteFromS3, getFileByKey } from "../utils/s3";
const Attachment = sequelize.define(
"attachment",
@@ -47,6 +47,9 @@ const Attachment = sequelize.define(
isPrivate: function () {
return this.acl === "private";
},
buffer: function () {
return getFileByKey(this.key);
},
},
}
);

View File

@@ -6,7 +6,7 @@ const { allow } = policy;
allow(User, "create", Attachment);
allow(User, "delete", Attachment, (actor, attachment) => {
allow(User, ["read", "delete"], Attachment, (actor, attachment) => {
if (!attachment || attachment.teamId !== actor.teamId) return false;
if (actor.isAdmin) return true;
if (actor.id === attachment.userId) return true;

View File

@@ -9,6 +9,11 @@ const { allow } = policy;
allow(User, "create", Collection);
allow(User, "import", Collection, (actor) => {
if (actor.isAdmin) return true;
throw new AdminRequiredError();
});
allow(User, ["read", "export"], Collection, (user, collection) => {
if (!collection || user.teamId !== collection.teamId) return false;

View File

@@ -0,0 +1,42 @@
// @flow
import fs from "fs";
import os from "os";
import File from "formidable/lib/file";
import collectionImporter from "../commands/collectionImporter";
import type { Event } from "../events";
import { Attachment, User } from "../models";
export default class Importer {
async on(event: Event) {
switch (event.name) {
case "collections.import": {
const { type } = event.data;
const attachment = await Attachment.findByPk(event.modelId);
const user = await User.findByPk(event.actorId);
const buffer = await attachment.buffer;
const tmpDir = os.tmpdir();
const tmpFilePath = `${tmpDir}/upload-${event.modelId}`;
await fs.promises.writeFile(tmpFilePath, buffer);
const file = new File({
name: attachment.name,
type: attachment.type,
path: tmpFilePath,
});
await collectionImporter({
file,
user,
type,
ip: event.ip,
});
await attachment.destroy();
return;
}
default:
}
}
}

View File

@@ -27,6 +27,9 @@ export default class Notifications {
}
async documentUpdated(event: DocumentEvent) {
// never send notifications when batch importing documents
if (event.data && event.data.source === "import") return;
const document = await Document.findByPk(event.documentId);
if (!document) return;

View File

@@ -55,6 +55,9 @@ export default class Slack {
}
async documentUpdated(event: DocumentEvent) {
// never send notifications when batch importing documents
if (event.data && event.data.source === "import") return;
const document = await Document.findByPk(event.documentId);
if (!document) return;

1
server/test/fixtures/corrupt.zip vendored Normal file
View File

@@ -0,0 +1 @@
CORRUPT

BIN
server/test/fixtures/empty.zip vendored Normal file

Binary file not shown.

BIN
server/test/fixtures/outline.zip vendored Normal file

Binary file not shown.

View File

@@ -46,7 +46,8 @@ export const makeCredential = () => {
export const makePolicy = (
credential: string,
longDate: string,
acl: string
acl: string,
contentType: string = "image"
) => {
const tomorrow = addHours(new Date(), 24);
const policy = {
@@ -55,7 +56,7 @@ export const makePolicy = (
["starts-with", "$key", ""],
{ acl },
["content-length-range", 0, +process.env.AWS_S3_UPLOAD_MAX_SIZE],
["starts-with", "$Content-Type", "image"],
["starts-with", "$Content-Type", contentType],
["starts-with", "$Cache-Control", ""],
{ "x-amz-algorithm": "AWS4-HMAC-SHA256" },
{ "x-amz-credential": credential },
@@ -177,7 +178,7 @@ export const getSignedImageUrl = async (key: string) => {
: s3.getSignedUrl("getObject", params);
};
export const getImageByKey = async (key: string) => {
export const getFileByKey = async (key: string) => {
const params = {
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,

View File

@@ -4,7 +4,7 @@ import * as Sentry from "@sentry/node";
import JSZip from "jszip";
import tmp from "tmp";
import { Attachment, Collection, Document } from "../models";
import { getImageByKey } from "./s3";
import { getFileByKey } from "./s3";
async function addToArchive(zip, documents) {
for (const doc of documents) {
@@ -20,7 +20,14 @@ async function addToArchive(zip, documents) {
text = text.replace(attachment.redirectUrl, encodeURI(attachment.key));
}
zip.file(`${document.title || "Untitled"}.md`, text);
zip.file(`${document.title || "Untitled"}.md`, text, {
date: document.updatedAt,
comment: JSON.stringify({
pinned: document.pinned,
createdAt: document.createdAt,
updatedAt: document.updatedAt,
}),
});
if (doc.children && doc.children.length) {
const folder = zip.folder(document.title);
@@ -31,7 +38,7 @@ async function addToArchive(zip, documents) {
async function addImageToArchive(zip, key) {
try {
const img = await getImageByKey(key);
const img = await getFileByKey(key);
zip.file(key, img, { createFolders: true });
} catch (err) {
if (process.env.SENTRY_DSN) {
@@ -60,7 +67,8 @@ export async function archiveCollection(collection: Collection) {
const zip = new JSZip();
if (collection.documentStructure) {
await addToArchive(zip, collection.documentStructure);
const folder = zip.folder(collection.name);
await addToArchive(folder, collection.documentStructure);
}
return archiveToPath(zip);

View File

@@ -109,7 +109,8 @@
"People": "People",
"Groups": "Groups",
"Share Links": "Share Links",
"Export Data": "Export Data",
"Import": "Import",
"Export": "Export",
"Integrations": "Integrations",
"Installation": "Installation",
"Unstar": "Unstar",
@@ -132,7 +133,6 @@
"New document": "New document",
"Import document": "Import document",
"Edit": "Edit",
"Export": "Export",
"Delete": "Delete",
"Collection members": "Collection members",
"Edit collection": "Edit collection",
@@ -306,6 +306,19 @@
"Use the <em>{{ meta }}+K</em> shortcut to search from anywhere in your knowledge base": "Use the <em>{{ meta }}+K</em> shortcut to search from anywhere in your knowledge base",
"No documents found for your search filters. <1></1>Create a new document?": "No documents found for your search filters. <1></1>Create a new document?",
"Clear filters": "Clear filters",
"Import started": "Import started",
"Export in progress…": "Export in progress…",
"It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.": "It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.",
"Your file has been uploaded and the import is currently being processed, you can safely leave this page while it completes.": "Your file has been uploaded and the import is currently being processed, you can safely leave this page while it completes.",
"Sorry, the file <em>{{ fileName }}</em> is missing valid collections or documents.": "Sorry, the file <em>{{ fileName }}</em> is missing valid collections or documents.",
"<em>{{ fileName }}</em> looks good, the following collections and their documents will be imported:": "<em>{{ fileName }}</em> looks good, the following collections and their documents will be imported:",
"Uploading": "Uploading",
"Confirm & Import": "Confirm & Import",
"Choose File": "Choose File",
"A full export might take some time, consider exporting a single document or collection if possible. We’ll put together a zip of all your documents in Markdown format and email it to <em>{{ userEmail }}</em>.": "A full export might take some time, consider exporting a single document or collection if possible. We’ll put together a zip of all your documents in Markdown format and email it to <em>{{ userEmail }}</em>.",
"Export Requested": "Export Requested",
"Requesting Export": "Requesting Export",
"Export Data": "Export Data",
"Everyone that has signed into Outline appears here. It’s possible that there are other users who have access through {team.signinMethods} but haven’t signed in yet.": "Everyone that has signed into Outline appears here. It’s possible that there are other users who have access through {team.signinMethods} but haven’t signed in yet.",
"Active": "Active",
"Admins": "Admins",

76
shared/utils/zip.js Normal file
View File

@@ -0,0 +1,76 @@
// @flow
import path from "path";
import JSZip, { ZipObject } from "jszip";
export type Item = {|
path: string,
dir: string,
name: string,
depth: number,
metadata: Object,
type: "collection" | "document" | "attachment",
item: ZipObject,
|};
export async function parseOutlineExport(
input: File | Buffer
): Promise<Item[]> {
const zip = await JSZip.loadAsync(input);
// this is so we can use async / await a little easier
let items: Item[] = [];
zip.forEach(async function (rawPath, item) {
const itemPath = rawPath.replace(/\/$/, "");
const dir = path.dirname(itemPath);
const name = path.basename(item.name);
const depth = itemPath.split("/").length - 1;
// known skippable items
if (itemPath.startsWith("__MACOSX") || itemPath.endsWith(".DS_Store")) {
return;
}
// attempt to parse extra metadata from zip comment
let metadata = {};
try {
metadata = item.comment ? JSON.parse(item.comment) : {};
} catch (err) {
console.log(
`ZIP comment found for ${item.name}, but could not be parsed as metadata: ${item.comment}`
);
}
if (depth === 0 && !item.dir) {
throw new Error(
"Root of zip file must only contain folders representing collections"
);
}
let type;
if (depth === 0 && item.dir && name) {
type = "collection";
}
if (depth > 0 && !item.dir && item.name.endsWith(".md")) {
type = "document";
}
if (depth > 0 && !item.dir && itemPath.includes("uploads")) {
type = "attachment";
}
if (!type) {
return;
}
items.push({
path: itemPath,
dir,
name,
depth,
type,
metadata,
item,
});
});
return items;
}