refactor: documents.batchImport -> collections.import

Tom Moor
2020-12-28 18:51:12 -08:00
parent d79933887d
commit caee7afde2
10 changed files with 72 additions and 72 deletions


@@ -1,7 +1,8 @@
// @flow
import fs from "fs";
import Router from "koa-router";
-import { ValidationError } from "../errors";
+import collectionImporter from "../commands/collectionImporter";
+import { ValidationError, InvalidRequestError } from "../errors";
import { exportCollections } from "../logistics";
import auth from "../middlewares/authentication";
import {
@@ -89,6 +90,44 @@ router.post("collections.info", auth(), async (ctx) => {
};
});
+router.post("collections.import", auth(), async (ctx) => {
+  const { type } = ctx.body;
+  ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
+
+  if (!ctx.is("multipart/form-data")) {
+    throw new InvalidRequestError("Request type must be multipart/form-data");
+  }
+
+  const file: any = Object.values(ctx.request.files)[0];
+  ctx.assertPresent(file, "file is required");
+
+  if (file.type !== "application/zip") {
+    throw new InvalidRequestError("File type must be a zip");
+  }
+
+  const user = ctx.state.user;
+  authorize(user, "import", Collection);
+
+  const { documents, attachments, collections } = await collectionImporter({
+    file,
+    user,
+    type,
+    ip: ctx.request.ip,
+  });
+
+  ctx.body = {
+    data: {
+      attachmentCount: attachments.length,
+      documentCount: documents.length,
+      collectionCount: collections.length,
+      collections: collections.map((collection) =>
+        presentCollection(collection)
+      ),
+    },
+    policies: presentPolicies(user, collections),
+  };
+});
router.post("collections.add_group", auth(), async (ctx) => {
const { id, groupId, permission = "read_write" } = ctx.body;
ctx.assertUuid(id, "id is required");
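A minimal sketch of how a client might call the new collections.import endpoint, based only on what the added handler enforces: a multipart/form-data request, a single application/zip upload, and type set to "outline". The /api route prefix, the bearer-token header, and the form field name "file" are assumptions, not part of this diff.

// Hypothetical client call – route prefix, auth header, and file field name are assumed.
async function importOutlineExport(zipFile, apiToken) {
  const body = new FormData();
  body.append("type", "outline"); // only "outline" is accepted by the handler
  body.append("file", zipFile); // the handler takes the first uploaded file and requires application/zip

  const res = await fetch("/api/collections.import", {
    method: "POST",
    headers: { Authorization: `Bearer ${apiToken}` }, // assumed auth scheme
    body, // fetch sends FormData as multipart/form-data, as the handler requires
  });

  // Per the handler: { data: { attachmentCount, documentCount, collectionCount, collections }, policies }
  return res.json();
}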


@@ -2,7 +2,6 @@
import Router from "koa-router";
import Sequelize from "sequelize";
import { subtractDate } from "../../shared/utils/date";
-import documentBatchImporter from "../commands/documentBatchImporter";
import documentCreator from "../commands/documentCreator";
import documentImporter from "../commands/documentImporter";
import documentMover from "../commands/documentMover";
@@ -1106,44 +1105,6 @@ router.post("documents.unpublish", auth(), async (ctx) => {
};
});
-router.post("documents.batchImport", auth(), async (ctx) => {
-  const { type } = ctx.body;
-  ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
-
-  if (!ctx.is("multipart/form-data")) {
-    throw new InvalidRequestError("Request type must be multipart/form-data");
-  }
-
-  const file: any = Object.values(ctx.request.files)[0];
-  ctx.assertPresent(file, "file is required");
-
-  if (file.type !== "application/zip") {
-    throw new InvalidRequestError("File type must be a zip");
-  }
-
-  const user = ctx.state.user;
-  authorize(user, "batchImport", Document);
-
-  const { documents, attachments, collections } = await documentBatchImporter({
-    file,
-    user,
-    type,
-    ip: ctx.request.ip,
-  });
-
-  ctx.body = {
-    data: {
-      attachmentCount: attachments.length,
-      documentCount: documents.length,
-      collectionCount: collections.length,
-      collections: collections.map((collection) =>
-        presentCollection(collection)
-      ),
-    },
-    policies: presentPolicies(user, collections),
-  };
-});
router.post("documents.import", auth(), async (ctx) => {
const { publish, collectionId, parentDocumentId, index } = ctx.body;


@@ -16,7 +16,7 @@ import documentImporter from "./documentImporter";
const log = debug("commands");
-export default async function documentBatchImporter({
+export default async function collectionImporter({
file,
type,
user,


@@ -4,13 +4,13 @@ import File from "formidable/lib/file";
import { Attachment, Document, Collection } from "../models";
import { buildUser } from "../test/factories";
import { flushdb } from "../test/support";
-import documentBatchImporter from "./documentBatchImporter";
+import collectionImporter from "./collectionImporter";
jest.mock("../utils/s3");
beforeEach(() => flushdb());
-describe("documentBatchImporter", () => {
+describe("collectionImporter", () => {
const ip = "127.0.0.1";
it("should import documents in outline format", async () => {
@@ -22,7 +22,7 @@ describe("documentBatchImporter", () => {
path: path.resolve(__dirname, "..", "test", "fixtures", name),
});
-const response = await documentBatchImporter({
+const response = await collectionImporter({
type: "outline",
user,
file,
@@ -49,7 +49,7 @@ describe("documentBatchImporter", () => {
let error;
try {
-await documentBatchImporter({
+await collectionImporter({
type: "outline",
user,
file,
@@ -73,7 +73,7 @@ describe("documentBatchImporter", () => {
let error;
try {
-await documentBatchImporter({
+await collectionImporter({
type: "outline",
user,
file,


@@ -9,6 +9,11 @@ const { allow } = policy;
allow(User, "create", Collection);
+allow(User, "import", Collection, (actor) => {
+  if (actor.isAdmin) return true;
+  throw new AdminRequiredError();
+});
allow(User, ["read", "export"], Collection, (user, collection) => {
if (!collection || user.teamId !== collection.teamId) return false;
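The route above guards the upload with authorize(user, "import", Collection), which resolves to this new policy, so non-admins are rejected before any file is processed. A simplified stand-in for the allow/authorize pairing is sketched below; it is not Outline's actual policy module, and the registry, lookup key, and error type are assumptions for illustration only.

// Simplified stand-in for the allow/authorize pattern – names and behaviour are assumed.
const registry = new Map();

// Register a predicate for (actor model, action(s), target model).
function allow(actorModel, actions, targetModel, predicate = () => true) {
  for (const action of [].concat(actions)) {
    registry.set(`${actorModel.name}:${action}:${targetModel.name}`, predicate);
  }
}

// Throw unless a matching predicate exists and returns true.
// The real "import" predicate above throws AdminRequiredError itself for non-admins.
function authorize(actor, action, targetModel, target) {
  const predicate = registry.get(`${actor.constructor.name}:${action}:${targetModel.name}`);
  if (!predicate || predicate(actor, target) !== true) {
    throw new Error(`${action} on ${targetModel.name} is not allowed`);
  }
}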


@@ -1,6 +1,5 @@
// @flow
import invariant from "invariant";
-import { AdminRequiredError } from "../errors";
import { Document, Revision, User } from "../models";
import policy from "./policy";
@@ -8,11 +7,6 @@ const { allow, cannot } = policy;
allow(User, "create", Document);
-allow(User, "batchImport", Document, (actor) => {
-  if (actor.isAdmin) return true;
-  throw new AdminRequiredError();
-});
allow(User, ["read", "download"], Document, (user, document) => {
// existence of collection option is not required here to account for share tokens
if (document.collection && cannot(user, "read", document.collection)) {