fix: Allow selection of embeds (#1562)

* feat: Support importing .docx or .html files as new documents (#1551)

  * Support importing .docx as new documents
  * Add html file support, build types and interface for easily adding file types to importer
  * fix: Upload embedded images in docx to storage
  * refactor: Bulk of logic to command
  * refactor: Do all importing on server, so we're not splitting logic for import into two places
  * test: Add documentImporter tests

  Co-authored-by: Lance Whatley <whatl3y@gmail.com>

* fix: Accessibility audit
* fix: Quick fix, non editable title closes #1560
* fix: Embed selection

Co-authored-by: Lance Whatley <whatl3y@gmail.com>
@@ -1,6 +1,7 @@
 // @flow
 import Router from "koa-router";
 import Sequelize from "sequelize";
+import documentImporter from "../commands/documentImporter";
 import documentMover from "../commands/documentMover";
 import { NotFoundError, InvalidRequestError } from "../errors";
 import auth from "../middlewares/authentication";
@@ -707,106 +708,23 @@ router.post("documents.unstar", auth(), async (ctx) => {
   };
 });
 
-router.post("documents.create", auth(), async (ctx) => {
-  const {
-    title = "",
-    text = "",
-    publish,
-    collectionId,
-    parentDocumentId,
-    templateId,
-    template,
-    index,
-  } = ctx.body;
-  const editorVersion = ctx.headers["x-editor-version"];
-
-  ctx.assertUuid(collectionId, "collectionId must be an uuid");
-  if (parentDocumentId) {
-    ctx.assertUuid(parentDocumentId, "parentDocumentId must be an uuid");
-  }
-
-  if (index) ctx.assertPositiveInteger(index, "index must be an integer (>=0)");
+router.post("documents.create", auth(), createDocumentFromContext);
+router.post("documents.import", auth(), async (ctx) => {
+  const file: any = Object.values(ctx.request.files)[0];
 
   const user = ctx.state.user;
   authorize(user, "create", Document);
 
-  const collection = await Collection.scope({
-    method: ["withMembership", user.id],
-  }).findOne({
-    where: {
-      id: collectionId,
-      teamId: user.teamId,
-    },
-  });
-  authorize(user, "publish", collection);
-
-  let parentDocument;
-  if (parentDocumentId) {
-    parentDocument = await Document.findOne({
-      where: {
-        id: parentDocumentId,
-        collectionId: collection.id,
-      },
-    });
-    authorize(user, "read", parentDocument, { collection });
-  }
-
-  let templateDocument;
-  if (templateId) {
-    templateDocument = await Document.findByPk(templateId, { userId: user.id });
-    authorize(user, "read", templateDocument);
-  }
-
-  let document = await Document.create({
-    parentDocumentId,
-    editorVersion,
-    collectionId: collection.id,
-    teamId: user.teamId,
-    userId: user.id,
-    lastModifiedById: user.id,
-    createdById: user.id,
-    template,
-    templateId: templateDocument ? templateDocument.id : undefined,
-    title: templateDocument ? templateDocument.title : title,
-    text: templateDocument ? templateDocument.text : text,
-  });
-
-  await Event.create({
-    name: "documents.create",
-    documentId: document.id,
-    collectionId: document.collectionId,
-    teamId: document.teamId,
-    actorId: user.id,
-    data: { title: document.title, templateId },
+  const { text, title } = await documentImporter({
+    user,
+    file,
     ip: ctx.request.ip,
   });
 
-  if (publish) {
-    await document.publish();
+  ctx.body.text = text;
+  ctx.body.title = title;
 
-    await Event.create({
-      name: "documents.publish",
-      documentId: document.id,
-      collectionId: document.collectionId,
-      teamId: document.teamId,
-      actorId: user.id,
-      data: { title: document.title },
-      ip: ctx.request.ip,
-    });
-  }
-
-  // reload to get all of the data needed to present (user, collection etc)
-  // we need to specify publishedAt to bypass default scope that only returns
-  // published documents
-  document = await Document.findOne({
-    where: { id: document.id, publishedAt: document.publishedAt },
-  });
-  document.collection = collection;
 
-  ctx.body = {
-    data: await presentDocument(document),
-    policies: presentPolicies(user, [document]),
-  };
+  await createDocumentFromContext(ctx);
 });
 
 router.post("documents.templatize", auth(), async (ctx) => {
@@ -1073,4 +991,107 @@ router.post("documents.unpublish", auth(), async (ctx) => {
   };
 });
 
+// TODO: update to actual `ctx` type
+export async function createDocumentFromContext(ctx: any) {
+  const {
+    title = "",
+    text = "",
+    publish,
+    collectionId,
+    parentDocumentId,
+    templateId,
+    template,
+    index,
+  } = ctx.body;
+  const editorVersion = ctx.headers["x-editor-version"];
+
+  ctx.assertUuid(collectionId, "collectionId must be an uuid");
+  if (parentDocumentId) {
+    ctx.assertUuid(parentDocumentId, "parentDocumentId must be an uuid");
+  }
+
+  if (index) ctx.assertPositiveInteger(index, "index must be an integer (>=0)");
+
+  const user = ctx.state.user;
+  authorize(user, "create", Document);
+
+  const collection = await Collection.scope({
+    method: ["withMembership", user.id],
+  }).findOne({
+    where: {
+      id: collectionId,
+      teamId: user.teamId,
+    },
+  });
+  authorize(user, "publish", collection);
+
+  let parentDocument;
+  if (parentDocumentId) {
+    parentDocument = await Document.findOne({
+      where: {
+        id: parentDocumentId,
+        collectionId: collection.id,
+      },
+    });
+    authorize(user, "read", parentDocument, { collection });
+  }
+
+  let templateDocument;
+  if (templateId) {
+    templateDocument = await Document.findByPk(templateId, { userId: user.id });
+    authorize(user, "read", templateDocument);
+  }
+
+  let document = await Document.create({
+    parentDocumentId,
+    editorVersion,
+    collectionId: collection.id,
+    teamId: user.teamId,
+    userId: user.id,
+    lastModifiedById: user.id,
+    createdById: user.id,
+    template,
+    templateId: templateDocument ? templateDocument.id : undefined,
+    title: templateDocument ? templateDocument.title : title,
+    text: templateDocument ? templateDocument.text : text,
+  });
+
+  await Event.create({
+    name: "documents.create",
+    documentId: document.id,
+    collectionId: document.collectionId,
+    teamId: document.teamId,
+    actorId: user.id,
+    data: { title: document.title, templateId },
+    ip: ctx.request.ip,
+  });
+
+  if (publish) {
+    await document.publish();
+
+    await Event.create({
+      name: "documents.publish",
+      documentId: document.id,
+      collectionId: document.collectionId,
+      teamId: document.teamId,
+      actorId: user.id,
+      data: { title: document.title },
+      ip: ctx.request.ip,
+    });
+  }
+
+  // reload to get all of the data needed to present (user, collection etc)
+  // we need to specify publishedAt to bypass default scope that only returns
+  // published documents
+  document = await Document.findOne({
+    where: { id: document.id, publishedAt: document.publishedAt },
+  });
+  document.collection = collection;
+
+  return (ctx.body = {
+    data: await presentDocument(document),
+    policies: presentPolicies(user, [document]),
+  });
+}
+
 export default router;
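
With both routes in place, a client creates a document from an uploaded file in a single multipart request: documents.import converts the file to markdown, writes the resulting text and title into ctx.body, and hands off to the same createDocumentFromContext that backs documents.create. A minimal client-side sketch (hypothetical helper, assuming cookie-based auth; only the endpoint path and the collectionId/publish fields come from the code above):

async function importFile(file: File, collectionId: string) {
  const body = new FormData();
  body.append("file", file); // surfaces server-side as ctx.request.files
  body.append("collectionId", collectionId);
  body.append("publish", "true");

  const res = await fetch("/api/documents.import", { method: "POST", body });
  if (!res.ok) throw new Error(`import failed: ${res.status}`);
  // same response shape as documents.create: { data: <document>, policies: [...] }
  return (await res.json()).data;
}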
@@ -1,6 +1,6 @@
 // @flow
 import Koa from "koa";
-import bodyParser from "koa-bodyparser";
+import bodyParser from "koa-body";
 import Router from "koa-router";
 
 import { NotFoundError } from "../errors";
@@ -31,8 +31,13 @@ const api = new Koa();
 const router = new Router();
 
 // middlewares
+api.use(
+  bodyParser({
+    multipart: true,
+    formidable: { maxFieldsSize: 10 * 1024 * 1024 },
+  })
+);
 api.use(errorHandling());
-api.use(bodyParser());
 api.use(methodOverride());
 api.use(validation());
 api.use(apiWrapper());
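
The koa-body swap above is what makes file uploads possible: with multipart: true, koa-body delegates multipart bodies to formidable, which streams each upload to a temporary file and describes it on ctx.request.files, which is why documentImporter can read file.path from disk. A rough sketch of the shape a handler sees (illustrative route, not part of this commit):

router.post("example.upload", auth(), async (ctx) => {
  // one entry per multipart field that carried a file
  const file: any = Object.values(ctx.request.files)[0];
  // file.name: original filename, file.type: MIME type,
  // file.path: temp file on disk, file.size: bytes
});

Note that formidable's maxFieldsSize caps non-file form fields only; file size limits are configured separately.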
@@ -1,7 +1,7 @@
 // @flow
 import addMonths from "date-fns/add_months";
 import Koa from "koa";
-import bodyParser from "koa-bodyparser";
+import bodyParser from "koa-body";
 import Router from "koa-router";
 import auth from "../middlewares/authentication";
 import validation from "../middlewares/validation";
113 server/commands/documentImporter.js Normal file
@@ -0,0 +1,113 @@
// @flow
import fs from "fs";
import File from "formidable/lib/file";
import mammoth from "mammoth";
import TurndownService from "turndown";
import uuid from "uuid";
import parseTitle from "../../shared/utils/parseTitle";
import { Attachment, Event, User } from "../models";
import dataURItoBuffer from "../utils/dataURItoBuffer";
import parseImages from "../utils/parseImages";
import { uploadToS3FromBuffer } from "../utils/s3";

// https://github.com/domchristie/turndown#options
const turndownService = new TurndownService({
  hr: "---",
  bulletListMarker: "-",
  headingStyle: "atx",
});
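// e.g. headingStyle "atx" makes turndown("<h1>Title</h1>") produce "# Title"
// rather than a setext underline, matching the markdown the editor stores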

interface ImportableFile {
  type: string;
  getMarkdown: (file: any) => Promise<string>;
}

const importMapping: ImportableFile[] = [
  {
    type:
      "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    getMarkdown: docxToMarkdown,
  },
  {
    type: "text/html",
    getMarkdown: htmlToMarkdown,
  },
  {
    type: "text/plain",
    getMarkdown: fileToMarkdown,
  },
  {
    type: "text/markdown",
    getMarkdown: fileToMarkdown,
  },
];

async function fileToMarkdown(file): Promise<string> {
  return fs.promises.readFile(file.path, "utf8");
}

async function docxToMarkdown(file): Promise<string> {
  const { value } = await mammoth.convertToHtml(file);
  return turndownService.turndown(value);
}

async function htmlToMarkdown(file): Promise<string> {
  const value = await fs.promises.readFile(file.path, "utf8");
  return turndownService.turndown(value);
}

export default async function documentImporter({
  file,
  user,
  ip,
}: {
  user: User,
  file: File,
  ip: string,
}): Promise<{ text: string, title: string }> {
  const fileInfo = importMapping.filter((item) => item.type === file.type)[0];
  let title = file.name.replace(/\.[^/.]+$/, "");
  let text = await fileInfo.getMarkdown(file);

  // If the first line of the imported text looks like a markdown heading
  // then we can use this as the document title
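  // e.g. "# Weekly Notes\nSome text" yields title "Weekly Notes", text "Some text"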
  if (text.trim().startsWith("# ")) {
    const result = parseTitle(text);
    title = result.title;
    text = text.replace(`# ${title}\n`, "");
  }

  // find data urls, convert to blobs, upload and write attachments
  const images = parseImages(text);
  const dataURIs = images.filter((href) => href.startsWith("data:"));

  for (const uri of dataURIs) {
    const name = "imported";
    const key = `uploads/${user.id}/${uuid.v4()}/${name}`;
    const acl = process.env.AWS_S3_ACL || "private";
    const { buffer, type } = dataURItoBuffer(uri);
    const url = await uploadToS3FromBuffer(buffer, type, key, acl);

    const attachment = await Attachment.create({
      key,
      acl,
      url,
      size: buffer.length,
      contentType: type,
      teamId: user.teamId,
      userId: user.id,
    });

    await Event.create({
      name: "attachments.create",
      data: { name },
      teamId: user.teamId,
      userId: user.id,
      ip,
    });

    text = text.replace(uri, attachment.redirectUrl);
  }

  return { text, title };
}
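
Because importMapping is a plain array of ImportableFile entries, supporting another format is just a matter of registering a new type/getMarkdown pair. A hypothetical sketch of such an extension (CSV support is not part of this commit, and csvToMarkdown is an assumed helper):

async function csvToMarkdown(file): Promise<string> {
  const raw = await fs.promises.readFile(file.path, "utf8");
  // naive conversion: first row becomes a markdown table header
  const [header, ...rows] = raw.trim().split("\n").map((line) => line.split(","));
  return [header, header.map(() => "---"), ...rows]
    .map((cols) => `| ${cols.join(" | ")} |`)
    .join("\n");
}

importMapping.push({ type: "text/csv", getMarkdown: csvToMarkdown });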
77 server/commands/documentImporter.test.js Normal file
@@ -0,0 +1,77 @@
// @flow
import path from "path";
import File from "formidable/lib/file";
import { Attachment } from "../models";
import { buildUser } from "../test/factories";
import { flushdb } from "../test/support";
import documentImporter from "./documentImporter";

jest.mock("../utils/s3");

beforeEach(() => flushdb());

describe("documentImporter", () => {
  const ip = "127.0.0.1";

  it("should convert Word Document to markdown", async () => {
    const user = await buildUser();
    const name = "images.docx";
    const file = new File({
      name,
      type:
        "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
      path: path.resolve(__dirname, "..", "test", "fixtures", name),
    });

    const response = await documentImporter({
      user,
      file,
      ip,
    });

    const attachments = await Attachment.count();
    expect(attachments).toEqual(1);

    expect(response.text).toContain("This is a test document for images");
    expect(response.text).toContain("![](/api/attachments.redirect?id=");
    expect(response.title).toEqual("images");
  });

  it("should convert HTML Document to markdown", async () => {
    const user = await buildUser();
    const name = "webpage.html";
    const file = new File({
      name,
      type: "text/html",
      path: path.resolve(__dirname, "..", "test", "fixtures", name),
    });

    const response = await documentImporter({
      user,
      file,
      ip,
    });

    expect(response.text).toContain("Text paragraph");
    expect(response.title).toEqual("Heading 1");
  });

  it("should load markdown", async () => {
    const user = await buildUser();
    const name = "markdown.md";
    const file = new File({
      name,
      type: "text/plain",
      path: path.resolve(__dirname, "..", "test", "fixtures", name),
    });

    const response = await documentImporter({
      user,
      file,
      ip,
    });

    expect(response.text).toContain("This is a test paragraph");
    expect(response.title).toEqual("Heading 1");
  });
});
@@ -1,6 +1,5 @@
 // @flow
-import { type Context } from "koa";
-import { Document, Collection, Event } from "../models";
+import { Document, Collection, User, Event } from "../models";
 import { sequelize } from "../sequelize";
 
 export default async function documentMover({
@@ -11,7 +10,7 @@ export default async function documentMover({
   index,
   ip,
 }: {
-  user: Context,
+  user: User,
   document: Document,
   collectionId: string,
   parentDocumentId?: string,
@@ -1,7 +1,7 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
-import documentMover from "../commands/documentMover";
 import { buildDocument, buildCollection } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+import documentMover from "./documentMover";
 
 beforeEach(() => flushdb());
 
@@ -1,7 +1,7 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
-import userInviter from "../commands/userInviter";
 import { buildUser } from "../test/factories";
 import { flushdb } from "../test/support";
+import userInviter from "./userInviter";
 
 beforeEach(() => flushdb());
 
BIN server/test/fixtures/images.docx vendored Normal file
Binary file not shown.
8 server/test/fixtures/markdown.md vendored Normal file
@@ -0,0 +1,8 @@
# Heading 1

## Heading 2

This is a test paragraph

- list item 1
- list item 2
8 server/test/fixtures/webpage.html vendored Normal file
@@ -0,0 +1,8 @@
<html>

<body>
<h1>Heading 1</h1>
<p>Text paragraph</p>
</body>

</html>
5 server/utils/__mocks__/s3.js Normal file
@@ -0,0 +1,5 @@
/* eslint-disable flowtype/require-valid-file-annotation */

export const uploadToS3FromBuffer = jest.fn().mockReturnValue("/endpoint/key");

export const publicS3Endpoint = jest.fn().mockReturnValue("http://mock");
20 server/utils/dataURItoBuffer.js Normal file
@@ -0,0 +1,20 @@
// @flow

export default function dataURItoBuffer(dataURI: string) {
  const split = dataURI.split(",");

  if (!dataURI.startsWith("data") || split.length <= 1) {
    throw new Error("Not a dataURI");
  }

  // separate out the mime component
  const type = split[0].split(":")[1].split(";")[0];
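  // e.g. for "data:image/png;base64,AAAA", split[0] is "data:image/png;base64"
  // and type resolves to "image/png"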

  // convert base64 to buffer
  const buffer = Buffer.from(split[1], "base64");

  return {
    buffer,
    type,
  };
}
20 server/utils/dataURItoBuffer.test.js Normal file
@@ -0,0 +1,20 @@
// @flow
import dataURItoBuffer from "./dataURItoBuffer";

it("should parse valid data URI", () => {
  const response = dataURItoBuffer(
    `data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAMAAADXqc3KAAAB+FBMVEUAAAA/mUPidDHiLi5Cn0XkNTPmeUrkdUg/m0Q0pEfcpSbwaVdKskg+lUP4zA/iLi3msSHkOjVAmETdJSjtYFE/lkPnRj3sWUs8kkLeqCVIq0fxvhXqUkbVmSjwa1n1yBLepyX1xxP0xRXqUkboST9KukpHpUbuvRrzrhF/ljbwaljuZFM4jELaoSdLtElJrUj1xxP6zwzfqSU4i0HYnydMtUlIqUfywxb60AxZqEXaoifgMCXptR9MtklHpEY2iUHWnSjvvRr70QujkC+pUC/90glMuEnlOjVMt0j70QriLS1LtEnnRj3qUUXfIidOjsxAhcZFo0bjNDH0xxNLr0dIrUdmntVTkMoyfL8jcLBRuErhJyrgKyb4zA/5zg3tYFBBmUTmQTnhMinruBzvvhnxwxZ/st+Ktt5zp9hqota2vtK6y9FemNBblc9HiMiTtMbFtsM6gcPV2r6dwroseLrMrbQrdLGdyKoobKbo3Zh+ynrgVllZulTsXE3rV0pIqUf42UVUo0JyjEHoS0HmsiHRGR/lmRz/1hjqnxjvpRWfwtOhusaz0LRGf7FEfbDVmqHXlJeW0pbXq5bec3fX0nTnzmuJuWvhoFFhm0FtrziBsjaAaDCYWC+uSi6jQS3FsSfLJiTirCOkuCG1KiG+wSC+GBvgyhTszQ64Z77KAAAARXRSTlMAIQRDLyUgCwsE6ebm5ubg2dLR0byXl4FDQzU1NDEuLSUgC+vr6urq6ubb29vb2tra2tG8vLu7u7uXl5eXgYGBgYGBLiUALabIAAABsElEQVQoz12S9VPjQBxHt8VaOA6HE+AOzv1wd7pJk5I2adpCC7RUcHd3d3fXf5PvLkxheD++z+yb7GSRlwD/+Hj/APQCZWxM5M+goF+RMbHK594v+tPoiN1uHxkt+xzt9+R9wnRTZZQpXQ0T5uP1IQxToyOAZiQu5HEpjeA4SWIoksRxNiGC1tRZJ4LNxgHgnU5nJZBDvuDdl8lzQRBsQ+s9PZt7s7Pz8wsL39/DkIfZ4xlB2Gqsq62ta9oxVlVrNZpihFRpGO9fzQw1ms0NDWZz07iGkJmIFH8xxkc3a/WWlubmFkv9AB2SEpDvKxbjidN2faseaNV3zoHXvv7wMODJdkOHAegweAfFPx4G67KluxzottCU9n8CUqXzcIQdXOytAHqXxomvykhEKN9EFutG22p//0rbNvHVxiJywa8yS2KDfV1dfbu31H8jF1RHiTKtWYeHxUvq3bn0pyjCRaiRU6aDO+gb3aEfEeVNsDgm8zzLy9egPa7Qt8TSJdwhjplk06HH43ZNJ3s91KKCHQ5x4sw1fRGYDZ0n1L4FKb9/BP5JLYxToheoFCVxz57PPS8UhhEpLBVeAAAAAElFTkSuQmCC`
  );
  expect(response.buffer).toBeTruthy();
  expect(response.type).toBe("image/png");
});

it("should throw an error with junk input", () => {
  let err;
  try {
    dataURItoBuffer("what");
  } catch (error) {
    err = error;
  }
  expect(err).toBeTruthy();
});
@@ -1,4 +1,4 @@
-/* eslint-disable flowtype/require-valid-file-annotation */
+// @flow
 import parseDocumentIds from "./parseDocumentIds";
 
 it("should not return non links", () => {
26 server/utils/parseImages.js Normal file
@@ -0,0 +1,26 @@
// @flow
import { parser } from "rich-markdown-editor";

export default function parseImages(text: string): string[] {
  const value = parser.parse(text);
  const images = [];

  function findImages(node) {
    if (node.type.name === "image") {
      if (!images.includes(node.attrs.src)) {
        images.push(node.attrs.src);
      }

      return;
    }

    if (!node.content.size) {
      return;
    }

    node.content.descendants(findImages);
  }

  findImages(value);
  return images;
}
24 server/utils/parseImages.test.js Normal file
@@ -0,0 +1,24 @@
// @flow
import parseImages from "./parseImages";

it("should not return non images", () => {
  expect(parseImages(`# Header`).length).toBe(0);
});

it("should return an array of images", () => {
  const result = parseImages(`# Header

![internal](/attachments/image.png)
`);

  expect(result.length).toBe(1);
  expect(result[0]).toBe("/attachments/image.png");
});

it("should not return non document links", () => {
  expect(parseImages(`[google](http://www.google.com)`).length).toBe(0);
});

it("should not return non document relative links", () => {
  expect(parseImages(`[relative](/developers)`).length).toBe(0);
});
@@ -89,6 +89,28 @@ export const publicS3Endpoint = (isServerUpload?: boolean) => {
   }${AWS_S3_UPLOAD_BUCKET_NAME}`;
 };
 
+export const uploadToS3FromBuffer = async (
+  buffer: Buffer,
+  contentType: string,
+  key: string,
+  acl: string
+) => {
+  await s3
+    .putObject({
+      ACL: acl,
+      Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
+      Key: key,
+      ContentType: contentType,
+      ContentLength: buffer.length,
+      ServerSideEncryption: "AES256",
+      Body: buffer,
+    })
+    .promise();
+
+  const endpoint = publicS3Endpoint(true);
+  return `${endpoint}/${key}`;
+};
+
 export const uploadToS3FromUrl = async (
   url: string,
   key: string,