diff --git a/server/middlewares/validate.ts b/server/middlewares/validate.ts
index 2c0684352..24f7e55ef 100644
--- a/server/middlewares/validate.ts
+++ b/server/middlewares/validate.ts
@@ -1,16 +1,20 @@
 import { Next } from "koa";
-import { z } from "zod";
+import { z, ZodError } from "zod";
 import { ValidationError } from "@server/errors";
-import { APIContext } from "@server/types";
+import { APIContext, BaseReq } from "@server/types";
 
-export default function validate(schema: T) {
-  return async function validateMiddleware(ctx: APIContext, next: Next) {
+export default function validate>(schema: T) {
+  return async function validateMiddleware(ctx: APIContext, next: Next) {
     try {
-      ctx.input = schema.parse(ctx.request.body);
+      ctx.input = schema.parse(ctx.request);
     } catch (err) {
-      const { path, message } = err.issues[0];
-      const [prefix = "ValidationError"] = path;
-      throw ValidationError(`${prefix}: ${message}`);
+      if (err instanceof ZodError) {
+        const { path, message } = err.issues[0];
+        const errMessage =
+          path.length > 0 ? `${path[path.length - 1]}: ${message}` : message;
+        throw ValidationError(errMessage);
+      }
+      ctx.throw(err);
     }
     return next();
   };
diff --git a/server/routes/api/BaseSchema.ts b/server/routes/api/BaseSchema.ts
new file mode 100644
index 000000000..04d7dba43
--- /dev/null
+++ b/server/routes/api/BaseSchema.ts
@@ -0,0 +1,8 @@
+import { z } from "zod";
+
+const BaseSchema = z.object({
+  body: z.unknown(),
+  query: z.unknown(),
+});
+
+export default BaseSchema;
diff --git a/server/routes/api/attachments/attachments.test.ts b/server/routes/api/attachments/attachments.test.ts
index a71c37479..2cd0879b9 100644
--- a/server/routes/api/attachments/attachments.test.ts
+++ b/server/routes/api/attachments/attachments.test.ts
@@ -307,6 +307,24 @@ describe("#attachments.redirect", () => {
     expect(res.status).toEqual(302);
   });
 
+  it("should return a redirect for the attachment if id supplied via query params", async () => {
+    const user = await buildUser();
+    const attachment = await buildAttachment({
+      teamId: user.teamId,
+      userId: user.id,
+    });
+    const res = await server.post(
+      `/api/attachments.redirect?id=${attachment.id}`,
+      {
+        body: {
+          token: user.getJwtToken(),
+        },
+        redirect: "manual",
+      }
+    );
+    expect(res.status).toEqual(302);
+  });
+
   it("should return a redirect for an attachment belonging to a trashed document user has access to", async () => {
     const user = await buildUser();
     const collection = await buildCollection({
@@ -385,4 +403,16 @@ describe("#attachments.redirect", () => {
     });
     expect(res.status).toEqual(403);
   });
+
+  it("should fail in absence of id", async () => {
+    const user = await buildUser();
+    const res = await server.post("/api/attachments.redirect", {
+      body: {
+        token: user.getJwtToken(),
+      },
+    });
+    const body = await res.json();
+    expect(res.status).toEqual(400);
+    expect(body.message).toEqual("id is required");
+  });
 });
diff --git a/server/routes/api/attachments/attachments.ts b/server/routes/api/attachments/attachments.ts
index 9a4281abb..9a8d485f9 100644
--- a/server/routes/api/attachments/attachments.ts
+++ b/server/routes/api/attachments/attachments.ts
@@ -13,7 +13,7 @@ import { authorize } from "@server/policies";
 import { presentAttachment } from "@server/presenters";
 import { APIContext } from "@server/types";
 import { getPresignedPost, publicS3Endpoint } from "@server/utils/s3";
-import { assertIn, assertUuid } from "@server/validation";
+import { assertIn } from "@server/validation";
 import * as T from "./schema";
 
 const router = new Router();
@@ -24,7 +24,7 @@ router.post(
   validate(T.AttachmentsCreateSchema),
   transaction(),
   async (ctx: APIContext) => {
-    const { name, documentId, contentType, size, preset } = ctx.input;
+    const { name, documentId, contentType, size, preset } = ctx.input.body;
     const { auth, transaction } = ctx.state;
     const { user } = auth;
 
@@ -113,7 +113,7 @@ router.post(
   auth(),
   validate(T.AttachmentDeleteSchema),
   async (ctx: APIContext) => {
-    const { id } = ctx.input;
+    const { id } = ctx.input.body;
     const { user } = ctx.state.auth;
     const attachment = await Attachment.findByPk(id, {
       rejectOnEmpty: true,
@@ -141,9 +141,10 @@ router.post(
   }
 );
 
-const handleAttachmentsRedirect = async (ctx: APIContext) => {
-  const id = ctx.request.body?.id ?? ctx.request.query?.id;
-  assertUuid(id, "id is required");
+const handleAttachmentsRedirect = async (
+  ctx: APIContext
+) => {
+  const id = (ctx.input.body.id ?? ctx.input.query.id) as string;
 
   const { user } = ctx.state.auth;
   const attachment = await Attachment.findByPk(id, {
@@ -165,7 +166,17 @@ const handleAttachmentsRedirect = async (ctx: APIContext) => {
   }
 };
 
-router.get("attachments.redirect", auth(), handleAttachmentsRedirect);
-router.post("attachments.redirect", auth(), handleAttachmentsRedirect);
+router.get(
+  "attachments.redirect",
+  auth(),
+  validate(T.AttachmentsRedirectSchema),
+  handleAttachmentsRedirect
+);
+router.post(
+  "attachments.redirect",
+  auth(),
+  validate(T.AttachmentsRedirectSchema),
+  handleAttachmentsRedirect
+);
 
 export default router;
diff --git a/server/routes/api/attachments/schema.ts b/server/routes/api/attachments/schema.ts
index 763960877..8604f0ad2 100644
--- a/server/routes/api/attachments/schema.ts
+++ b/server/routes/api/attachments/schema.ts
@@ -1,28 +1,49 @@
+import { isEmpty } from "lodash";
 import { z } from "zod";
 import { AttachmentPreset } from "@shared/types";
+import BaseSchema from "@server/routes/api/BaseSchema";
 
-export const AttachmentsCreateSchema = z.object({
-  /** Attachment name */
-  name: z.string(),
+export const AttachmentsCreateSchema = BaseSchema.extend({
+  body: z.object({
+    /** Attachment name */
+    name: z.string(),
 
-  /** Id of the document to which the Attachment belongs */
-  documentId: z.string().uuid().optional(),
+    /** Id of the document to which the Attachment belongs */
+    documentId: z.string().uuid().optional(),
 
-  /** File size of the Attachment */
-  size: z.number(),
+    /** File size of the Attachment */
+    size: z.number(),
 
-  /** Content-Type of the Attachment */
-  contentType: z.string().optional().default("application/octet-stream"),
+    /** Content-Type of the Attachment */
+    contentType: z.string().optional().default("application/octet-stream"),
 
-  /** Attachment type */
-  preset: z.nativeEnum(AttachmentPreset),
+    /** Attachment type */
+    preset: z.nativeEnum(AttachmentPreset),
+  }),
 });
 
 export type AttachmentCreateReq = z.infer<typeof AttachmentsCreateSchema>;
 
-export const AttachmentDeleteSchema = z.object({
-  /** Id of the attachment to be deleted */
-  id: z.string().uuid(),
+export const AttachmentDeleteSchema = BaseSchema.extend({
+  body: z.object({
+    /** Id of the attachment to be deleted */
+    id: z.string().uuid(),
+  }),
 });
 
 export type AttachmentDeleteReq = z.infer<typeof AttachmentDeleteSchema>;
+
+export const AttachmentsRedirectSchema = BaseSchema.extend({
+  body: z.object({
+    /** Id of the attachment to be deleted */
+    id: z.string().uuid().optional(),
+  }),
+  query: z.object({
+    /** Id of the attachment to be deleted */
+    id: z.string().uuid().optional(),
+  }),
+}).refine((req) => !(isEmpty(req.body.id) && isEmpty(req.query.id)), {
+  message: "id is required",
+});
+
+export type AttachmentsRedirectReq = z.infer<typeof AttachmentsRedirectSchema>;
+ message: "id is required", +}); + +export type AttachmentsRedirectReq = z.infer; diff --git a/server/routes/api/documents/__snapshots__/documents.test.ts.snap b/server/routes/api/documents/__snapshots__/documents.test.ts.snap index a045cdb84..2115718d9 100644 --- a/server/routes/api/documents/__snapshots__/documents.test.ts.snap +++ b/server/routes/api/documents/__snapshots__/documents.test.ts.snap @@ -66,7 +66,7 @@ Object { exports[`#documents.update should require text while appending 1`] = ` Object { "error": "validation_error", - "message": "ValidationError: text is required while appending", + "message": "text is required while appending", "ok": false, "status": 400, } diff --git a/server/routes/api/documents/documents.test.ts b/server/routes/api/documents/documents.test.ts index 166c74d2a..2a35bf62e 100644 --- a/server/routes/api/documents/documents.test.ts +++ b/server/routes/api/documents/documents.test.ts @@ -30,9 +30,7 @@ describe("#documents.info", () => { }); const body = await res.json(); expect(res.status).toEqual(400); - expect(body.message).toEqual( - "ValidationError: one of id or shareId is required" - ); + expect(body.message).toEqual("one of id or shareId is required"); }); it("should return published document", async () => { @@ -1824,7 +1822,7 @@ describe("#documents.move", () => { const body = await res.json(); expect(res.status).toEqual(400); expect(body.message).toEqual( - "ValidationError: infinite loop detected, cannot nest a document inside itself" + "infinite loop detected, cannot nest a document inside itself" ); }); @@ -2288,7 +2286,7 @@ describe("#documents.create", () => { expect(res.status).toEqual(400); expect(body.message).toBe( - "ValidationError: collectionId is required to create a template document" + "collectionId is required to create a template document" ); }); @@ -2306,9 +2304,7 @@ describe("#documents.create", () => { const body = await res.json(); expect(res.status).toEqual(400); - expect(body.message).toBe( - "ValidationError: collectionId is required to publish" - ); + expect(body.message).toBe("collectionId is required to publish"); }); it("should not allow creating a nested doc without a collection", async () => { @@ -2326,7 +2322,7 @@ describe("#documents.create", () => { expect(res.status).toEqual(400); expect(body.message).toBe( - "ValidationError: collectionId is required to create a nested document" + "collectionId is required to create a nested document" ); }); diff --git a/server/routes/api/documents/documents.ts b/server/routes/api/documents/documents.ts index 75e8da8e0..f73b19b86 100644 --- a/server/routes/api/documents/documents.ts +++ b/server/routes/api/documents/documents.ts @@ -59,7 +59,7 @@ router.post( pagination(), validate(T.DocumentsListSchema), async (ctx: APIContext) => { - let { sort } = ctx.input; + let { sort } = ctx.input.body; const { direction, template, @@ -67,7 +67,7 @@ router.post( backlinkDocumentId, parentDocumentId, userId: createdById, - } = ctx.input; + } = ctx.input.body; // always filter by the current team const { user } = ctx.state.auth; @@ -176,7 +176,7 @@ router.post( pagination(), validate(T.DocumentsArchivedSchema), async (ctx: APIContext) => { - const { sort, direction } = ctx.input; + const { sort, direction } = ctx.input.body; const { user } = ctx.state.auth; const collectionIds = await user.collectionIds(); const collectionScope: Readonly = { @@ -220,7 +220,7 @@ router.post( pagination(), validate(T.DocumentsDeletedSchema), async (ctx: APIContext) => { - const { sort, direction } = ctx.input; 
+ const { sort, direction } = ctx.input.body; const { user } = ctx.state.auth; const collectionIds = await user.collectionIds({ paranoid: false, @@ -280,7 +280,7 @@ router.post( pagination(), validate(T.DocumentsViewedSchema), async (ctx: APIContext) => { - const { sort, direction } = ctx.input; + const { sort, direction } = ctx.input.body; const { user } = ctx.state.auth; const collectionIds = await user.collectionIds(); const userId = user.id; @@ -333,7 +333,7 @@ router.post( pagination(), validate(T.DocumentsDraftsSchema), async (ctx: APIContext) => { - const { collectionId, dateFilter, direction, sort } = ctx.input; + const { collectionId, dateFilter, direction, sort } = ctx.input.body; const { user } = ctx.state.auth; if (collectionId) { @@ -396,7 +396,7 @@ router.post( }), validate(T.DocumentsInfoSchema), async (ctx: APIContext) => { - const { id, shareId, apiVersion } = ctx.input; + const { id, shareId, apiVersion } = ctx.input.body; const { user } = ctx.state.auth; const teamFromCtx = await getTeamFromContext(ctx); const { document, share, collection } = await documentLoader({ @@ -442,7 +442,7 @@ router.post( }), validate(T.DocumentsExportSchema), async (ctx: APIContext) => { - const { id } = ctx.input; + const { id } = ctx.input.body; const { user } = ctx.state.auth; const accept = ctx.request.headers["accept"]; @@ -494,7 +494,7 @@ router.post( auth({ member: true }), validate(T.DocumentsRestoreSchema), async (ctx: APIContext) => { - const { id, collectionId, revisionId } = ctx.input; + const { id, collectionId, revisionId } = ctx.input.body; const { user } = ctx.state.auth; const document = await Document.findByPk(id, { userId: user.id, @@ -604,7 +604,7 @@ router.post( dateFilter, collectionId, userId, - } = ctx.input; + } = ctx.input.body; const { offset, limit } = ctx.state.pagination; const { user } = ctx.state.auth; let collaboratorIds = undefined; @@ -660,7 +660,7 @@ router.post( shareId, snippetMinWords, snippetMaxWords, - } = ctx.input; + } = ctx.input.body; const { offset, limit } = ctx.state.pagination; // Unfortunately, this still doesn't adequately handle cases when auth is optional @@ -765,7 +765,7 @@ router.post( auth({ member: true }), validate(T.DocumentsTemplatizeSchema), async (ctx: APIContext) => { - const { id } = ctx.input; + const { id } = ctx.input.body; const { user } = ctx.state.auth; const original = await Document.findByPk(id, { @@ -826,7 +826,7 @@ router.post( templateId, collectionId, append, - } = ctx.input; + } = ctx.input.body; const editorVersion = ctx.headers["x-editor-version"] as string | undefined; const { user } = ctx.state.auth; let collection: Collection | null | undefined; @@ -887,7 +887,7 @@ router.post( auth(), validate(T.DocumentsMoveSchema), async (ctx: APIContext) => { - const { id, collectionId, parentDocumentId, index } = ctx.input; + const { id, collectionId, parentDocumentId, index } = ctx.input.body; const { user } = ctx.state.auth; const document = await Document.findByPk(id, { userId: user.id, @@ -941,7 +941,7 @@ router.post( auth(), validate(T.DocumentsArchiveSchema), async (ctx: APIContext) => { - const { id } = ctx.input; + const { id } = ctx.input.body; const { user } = ctx.state.auth; const document = await Document.findByPk(id, { @@ -974,7 +974,7 @@ router.post( auth(), validate(T.DocumentsDeleteSchema), async (ctx: APIContext) => { - const { id, permanent } = ctx.input; + const { id, permanent } = ctx.input.body; const { user } = ctx.state.auth; if (permanent) { @@ -1039,7 +1039,7 @@ router.post( auth(), 
@@ -1039,7 +1039,7 @@ router.post(
   auth(),
   validate(T.DocumentsUnpublishSchema),
   async (ctx: APIContext) => {
-    const { id } = ctx.input;
+    const { id } = ctx.input.body;
     const { user } = ctx.state.auth;
 
     const document = await Document.findByPk(id, {
@@ -1083,9 +1083,7 @@ router.post(
       throw InvalidRequestError("Request type must be multipart/form-data");
     }
 
-    // String as this is always multipart/form-data
-    const publish = ctx.input.publish === "true";
-    const { collectionId, parentDocumentId } = ctx.input;
+    const { collectionId, parentDocumentId, publish } = ctx.input.body;
 
     const file = ctx.request.files
       ? Object.values(ctx.request.files)[0]
@@ -1173,7 +1171,7 @@ router.post(
       parentDocumentId,
       templateId,
       template,
-    } = ctx.input;
+    } = ctx.input.body;
     const editorVersion = ctx.headers["x-editor-version"] as string | undefined;
     const { user } = ctx.state.auth;
 
diff --git a/server/routes/api/documents/schema.ts b/server/routes/api/documents/schema.ts
index 9d01d3a6d..78fb92339 100644
--- a/server/routes/api/documents/schema.ts
+++ b/server/routes/api/documents/schema.ts
@@ -2,6 +2,7 @@ import { isEmpty } from "lodash";
 import isUUID from "validator/lib/isUUID";
 import { z } from "zod";
 import { SHARE_URL_SLUG_REGEX } from "@shared/utils/urlHelpers";
+import BaseSchema from "@server/routes/api/BaseSchema";
 
 const DocumentsSortParamsSchema = z.object({
   /** Specifies the attributes by which documents will be sorted in the list */
@@ -39,63 +40,70 @@ const BaseIdSchema = z.object({
   id: z.string(),
 });
 
-export const DocumentsListSchema = DocumentsSortParamsSchema.extend({
-  /** Id of the user who created the doc */
-  userId: z.string().uuid().optional(),
+export const DocumentsListSchema = BaseSchema.extend({
+  body: DocumentsSortParamsSchema.extend({
+    /** Id of the user who created the doc */
+    userId: z.string().uuid().optional(),
 
-  /** Alias for userId - kept for backwards compatibility */
-  user: z.string().uuid().optional(),
+    /** Alias for userId - kept for backwards compatibility */
+    user: z.string().uuid().optional(),
 
-  /** Id of the collection to which the document belongs */
-  collectionId: z.string().uuid().optional(),
+    /** Id of the collection to which the document belongs */
+    collectionId: z.string().uuid().optional(),
 
-  /** Alias for collectionId - kept for backwards compatibility */
-  collection: z.string().uuid().optional(),
+    /** Alias for collectionId - kept for backwards compatibility */
+    collection: z.string().uuid().optional(),
 
-  /** Id of the backlinked document */
-  backlinkDocumentId: z.string().uuid().optional(),
+    /** Id of the backlinked document */
+    backlinkDocumentId: z.string().uuid().optional(),
 
-  /** Id of the parent document to which the document belongs */
-  parentDocumentId: z.string().uuid().nullish(),
+    /** Id of the parent document to which the document belongs */
+    parentDocumentId: z.string().uuid().nullish(),
 
-  /** Boolean which denotes whether the document is a template */
-  template: z.boolean().optional(),
-})
+    /** Boolean which denotes whether the document is a template */
+    template: z.boolean().optional(),
+  }),
   // Maintains backwards compatibility
-  .transform((doc) => {
-    doc.collectionId = doc.collectionId || doc.collection;
-    doc.userId = doc.userId || doc.user;
-    delete doc.collection;
-    delete doc.user;
+}).transform((req) => {
+  req.body.collectionId = req.body.collectionId || req.body.collection;
+  req.body.userId = req.body.userId || req.body.user;
+  delete req.body.collection;
+  delete req.body.user;
 
-    return doc;
-  });
+  return req;
+});
 
 export type DocumentsListReq = z.infer<typeof DocumentsListSchema>;
 
-export const DocumentsArchivedSchema = DocumentsSortParamsSchema.extend({});
+export const DocumentsArchivedSchema = BaseSchema.extend({
+  body: DocumentsSortParamsSchema.extend({}),
+});
 
 export type DocumentsArchivedReq = z.infer<typeof DocumentsArchivedSchema>;
 
-export const DocumentsDeletedSchema = DocumentsSortParamsSchema.extend({});
+export const DocumentsDeletedSchema = BaseSchema.extend({
+  body: DocumentsSortParamsSchema.extend({}),
+});
 
 export type DocumentsDeletedReq = z.infer<typeof DocumentsDeletedSchema>;
 
-export const DocumentsViewedSchema = DocumentsSortParamsSchema.extend({});
+export const DocumentsViewedSchema = BaseSchema.extend({
+  body: DocumentsSortParamsSchema.extend({}),
+});
 
 export type DocumentsViewedReq = z.infer<typeof DocumentsViewedSchema>;
 
-export const DocumentsDraftsSchema = DocumentsSortParamsSchema.merge(
-  DateFilterSchema
-).extend({
-  /** Id of the collection to which the document belongs */
-  collectionId: z.string().uuid().optional(),
+export const DocumentsDraftsSchema = BaseSchema.extend({
+  body: DocumentsSortParamsSchema.merge(DateFilterSchema).extend({
+    /** Id of the collection to which the document belongs */
+    collectionId: z.string().uuid().optional(),
+  }),
 });
 
 export type DocumentsDraftsReq = z.infer<typeof DocumentsDraftsSchema>;
 
-export const DocumentsInfoSchema = z
-  .object({
+export const DocumentsInfoSchema = BaseSchema.extend({
+  body: z.object({
     /** Id of the document to be retrieved */
     id: z.string().optional(),
 
@@ -107,136 +115,154 @@ export const DocumentsInfoSchema = z
 
     /** Version of the API to be used */
     apiVersion: z.number().optional(),
-  })
-  .refine((obj) => !(isEmpty(obj.id) && isEmpty(obj.shareId)), {
-    message: "one of id or shareId is required",
-  });
+  }),
+}).refine((req) => !(isEmpty(req.body.id) && isEmpty(req.body.shareId)), {
+  message: "one of id or shareId is required",
+});
 
 export type DocumentsInfoReq = z.infer<typeof DocumentsInfoSchema>;
 
-export const DocumentsExportSchema = BaseIdSchema.extend({});
+export const DocumentsExportSchema = BaseSchema.extend({
+  body: BaseIdSchema,
+});
 
 export type DocumentsExportReq = z.infer<typeof DocumentsExportSchema>;
 
-export const DocumentsRestoreSchema = BaseIdSchema.extend({
-  /** Id of the collection to which the document belongs */
-  collectionId: z.string().uuid().optional(),
+export const DocumentsRestoreSchema = BaseSchema.extend({
+  body: BaseIdSchema.extend({
+    /** Id of the collection to which the document belongs */
+    collectionId: z.string().uuid().optional(),
 
-  /** Id of document revision */
-  revisionId: z.string().uuid().optional(),
+    /** Id of document revision */
+    revisionId: z.string().uuid().optional(),
+  }),
 });
 
 export type DocumentsRestoreReq = z.infer<typeof DocumentsRestoreSchema>;
 
-export const DocumentsSearchSchema = SearchQuerySchema.merge(
-  DateFilterSchema
-).extend({
-  /** Whether to include archived docs in results */
-  includeArchived: z.boolean().optional(),
+export const DocumentsSearchSchema = BaseSchema.extend({
+  body: SearchQuerySchema.merge(DateFilterSchema).extend({
+    /** Whether to include archived docs in results */
+    includeArchived: z.boolean().optional(),
 
-  /** Whether to include drafts in results */
-  includeDrafts: z.boolean().optional(),
+    /** Whether to include drafts in results */
+    includeDrafts: z.boolean().optional(),
 
-  /** Filter results for team based on the collection */
-  collectionId: z.string().uuid().optional(),
+    /** Filter results for team based on the collection */
+    collectionId: z.string().uuid().optional(),
 
-  /** Filter results based on user */
-  userId: z.string().uuid().optional(),
+    /** Filter results based on user */
+    userId: z.string().uuid().optional(),
 
-  /** Filter results for the team derived from shareId */
-  shareId: z
-    .string()
-    .refine((val) => isUUID(val) || SHARE_URL_SLUG_REGEX.test(val))
-    .optional(),
+    /** Filter results for the team derived from shareId */
+    shareId: z
+      .string()
+      .refine((val) => isUUID(val) || SHARE_URL_SLUG_REGEX.test(val))
+      .optional(),
 
-  /** Min words to be shown in the results snippets */
-  snippetMinWords: z.number().default(20),
+    /** Min words to be shown in the results snippets */
+    snippetMinWords: z.number().default(20),
 
-  /** Max words to be accomodated in the results snippets */
-  snippetMaxWords: z.number().default(30),
+    /** Max words to be accomodated in the results snippets */
+    snippetMaxWords: z.number().default(30),
+  }),
 });
 
 export type DocumentsSearchReq = z.infer<typeof DocumentsSearchSchema>;
 
-export const DocumentsTemplatizeSchema = BaseIdSchema.extend({});
+export const DocumentsTemplatizeSchema = BaseSchema.extend({
+  body: BaseIdSchema,
+});
 
 export type DocumentsTemplatizeReq = z.infer<typeof DocumentsTemplatizeSchema>;
 
-export const DocumentsUpdateSchema = BaseIdSchema.extend({
-  /** Doc title to be updated */
-  title: z.string().optional(),
+export const DocumentsUpdateSchema = BaseSchema.extend({
+  body: BaseIdSchema.extend({
+    /** Doc title to be updated */
+    title: z.string().optional(),
 
-  /** Doc text to be updated */
-  text: z.string().optional(),
+    /** Doc text to be updated */
+    text: z.string().optional(),
 
-  /** Boolean to denote if the doc should occupy full width */
-  fullWidth: z.boolean().optional(),
+    /** Boolean to denote if the doc should occupy full width */
+    fullWidth: z.boolean().optional(),
 
-  /** Boolean to denote if the doc should be published */
-  publish: z.boolean().optional(),
+    /** Boolean to denote if the doc should be published */
+    publish: z.boolean().optional(),
 
-  /** Revision to compare against document revision count */
-  lastRevision: z.number().optional(),
+    /** Revision to compare against document revision count */
+    lastRevision: z.number().optional(),
 
-  /** Doc template Id */
-  templateId: z.string().uuid().nullish(),
+    /** Doc template Id */
+    templateId: z.string().uuid().nullish(),
 
-  /** Doc collection Id */
-  collectionId: z.string().uuid().optional(),
+    /** Doc collection Id */
+    collectionId: z.string().uuid().optional(),
 
-  /** Boolean to denote if text should be appended */
-  append: z.boolean().optional(),
-}).refine((obj) => !(obj.append && !obj.text), {
+    /** Boolean to denote if text should be appended */
+    append: z.boolean().optional(),
+  }),
+}).refine((req) => !(req.body.append && !req.body.text), {
   message: "text is required while appending",
 });
 
 export type DocumentsUpdateReq = z.infer<typeof DocumentsUpdateSchema>;
 
-export const DocumentsMoveSchema = BaseIdSchema.extend({
-  /** Id of collection to which the doc is supposed to be moved */
-  collectionId: z.string().uuid(),
+export const DocumentsMoveSchema = BaseSchema.extend({
+  body: BaseIdSchema.extend({
+    /** Id of collection to which the doc is supposed to be moved */
+    collectionId: z.string().uuid(),
 
-  /** Parent Id, in case if the doc is moved to a new parent */
-  parentDocumentId: z.string().uuid().nullish(),
+    /** Parent Id, in case if the doc is moved to a new parent */
+    parentDocumentId: z.string().uuid().nullish(),
 
-  /** Helps evaluate the new index in collection structure upon move */
-  index: z.number().gte(0).optional(),
-}).refine((obj) => !(obj.parentDocumentId === obj.id), {
+    /** Helps evaluate the new index in collection structure upon move */
+    index: z.number().gte(0).optional(),
+  }),
+}).refine((req) => !(req.body.parentDocumentId === req.body.id), {
   message: "infinite loop detected, cannot nest a document inside itself",
 });
 
 export type DocumentsMoveReq = z.infer<typeof DocumentsMoveSchema>;
 
-export const DocumentsArchiveSchema = BaseIdSchema.extend({});
+export const DocumentsArchiveSchema = BaseSchema.extend({
+  body: BaseIdSchema,
+});
 
 export type DocumentsArchiveReq = z.infer<typeof DocumentsArchiveSchema>;
 
-export const DocumentsDeleteSchema = BaseIdSchema.extend({
-  /** Whether to permanently delete the doc as opposed to soft-delete */
-  permanent: z.boolean().optional(),
+export const DocumentsDeleteSchema = BaseSchema.extend({
+  body: BaseIdSchema.extend({
+    /** Whether to permanently delete the doc as opposed to soft-delete */
+    permanent: z.boolean().optional(),
+  }),
 });
 
 export type DocumentsDeleteReq = z.infer<typeof DocumentsDeleteSchema>;
 
-export const DocumentsUnpublishSchema = BaseIdSchema.extend({});
+export const DocumentsUnpublishSchema = BaseSchema.extend({
+  body: BaseIdSchema,
+});
 
 export type DocumentsUnpublishReq = z.infer<typeof DocumentsUnpublishSchema>;
 
-export const DocumentsImportSchema = z.object({
-  /** Whether to publish the imported docs. String due to multi-part form upload */
-  publish: z.string().optional(),
+export const DocumentsImportSchema = BaseSchema.extend({
+  body: z.object({
+    /** Whether to publish the imported docs. String as this is always multipart/form-data */
+    publish: z.preprocess((val) => val === "true", z.boolean()).optional(),
 
-  /** Import docs to this collection */
-  collectionId: z.string().uuid(),
+    /** Import docs to this collection */
+    collectionId: z.string().uuid(),
 
-  /** Import under this parent doc */
-  parentDocumentId: z.string().uuid().nullish(),
+    /** Import under this parent doc */
+    parentDocumentId: z.string().uuid().nullish(),
+  }),
 });
 
 export type DocumentsImportReq = z.infer<typeof DocumentsImportSchema>;
 
-export const DocumentsCreateSchema = z
-  .object({
+export const DocumentsCreateSchema = BaseSchema.extend({
+  body: z.object({
     /** Doc title */
     title: z.string().default(""),
 
@@ -257,14 +283,15 @@ export const DocumentsCreateSchema = z
 
     /** Whether to create a template doc */
     template: z.boolean().optional(),
-  })
-  .refine((obj) => !(obj.parentDocumentId && !obj.collectionId), {
+  }),
+})
+  .refine((req) => !(req.body.parentDocumentId && !req.body.collectionId), {
     message: "collectionId is required to create a nested document",
   })
-  .refine((obj) => !(obj.template && !obj.collectionId), {
+  .refine((req) => !(req.body.template && !req.body.collectionId), {
     message: "collectionId is required to create a template document",
   })
-  .refine((obj) => !(obj.publish && !obj.collectionId), {
+  .refine((req) => !(req.body.publish && !req.body.collectionId), {
     message: "collectionId is required to publish",
   });
 
diff --git a/server/routes/api/events/events.ts b/server/routes/api/events/events.ts
index e66af0528..bbb6184b3 100644
--- a/server/routes/api/events/events.ts
+++ b/server/routes/api/events/events.ts
@@ -26,7 +26,7 @@ router.post(
       collectionId,
       name,
       auditLog,
-    } = ctx.input;
+    } = ctx.input.body;
 
     let where: WhereOptions = {
       name: Event.ACTIVITY_EVENTS,
diff --git a/server/routes/api/events/schema.ts b/server/routes/api/events/schema.ts
index 9462a7061..66b39248a 100644
--- a/server/routes/api/events/schema.ts
+++ b/server/routes/api/events/schema.ts
@@ -1,32 +1,35 @@
 import { z } from "zod";
+import BaseSchema from "@server/routes/api/BaseSchema";
 
-export const EventsListSchema = z.object({
-  /** Id of the user who performed the action */
-  actorId: z.string().uuid().optional(),
+export const EventsListSchema = BaseSchema.extend({
+  body: z.object({
+    /** Id of the user who performed the action */
+    actorId: z.string().uuid().optional(),
 
-  /** Id of the document to filter the events for */
-  documentId: z.string().uuid().optional(),
+    /** Id of the document to filter the events for */
+    documentId: z.string().uuid().optional(),
 
-  /** Id of the collection to filter the events for */
-  collectionId: z.string().uuid().optional(),
+    /** Id of the collection to filter the events for */
+    collectionId: z.string().uuid().optional(),
 
-  /** Whether to include audit events */
-  auditLog: z.boolean().default(false),
+    /** Whether to include audit events */
+    auditLog: z.boolean().default(false),
 
-  /** Name of the event to retrieve */
-  name: z.string().optional(),
+    /** Name of the event to retrieve */
+    name: z.string().optional(),
 
-  /** The attribute to sort the events by */
-  sort: z
-    .string()
-    .refine((val) => ["name", "createdAt"].includes(val))
-    .default("createdAt"),
+    /** The attribute to sort the events by */
+    sort: z
+      .string()
+      .refine((val) => ["name", "createdAt"].includes(val))
+      .default("createdAt"),
 
-  /** The direction to sort the events */
-  direction: z
-    .string()
-    .optional()
-    .transform((val) => (val !== "ASC" ? "DESC" : val)),
+    /** The direction to sort the events */
+    direction: z
+      .string()
+      .optional()
+      .transform((val) => (val !== "ASC" ? "DESC" : val)),
+  }),
 });
 
 export type EventsListReq = z.infer<typeof EventsListSchema>;
diff --git a/server/types.ts b/server/types.ts
index 7f81465d4..ec680a943 100644
--- a/server/types.ts
+++ b/server/types.ts
@@ -1,7 +1,9 @@
 import { ParameterizedContext, DefaultContext } from "koa";
 import { IRouterParamContext } from "koa-router";
 import { Transaction } from "sequelize/types";
+import { z } from "zod";
 import { Client } from "@shared/types";
+import BaseSchema from "@server/routes/api/BaseSchema";
 import { AccountProvisionerResult } from "./commands/accountProvisioner";
 import { FileOperation, Team, User } from "./models";
 
@@ -34,12 +36,17 @@ export type AppState = {
 
 export type AppContext = ParameterizedContext;
 
-export interface APIContext, ResT = unknown>
+export type BaseReq = z.infer<typeof BaseSchema>;
+
+export type BaseRes = unknown;
+
+export interface APIContext
   extends ParameterizedContext<
     AppState,
     DefaultContext & IRouterParamContext,
     ResT
   > {
+  /** Typed and validated version of request, consisting of validated body, query, etc */
   input: ReqT;
 }
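
Illustrative note (not part of the patch): under the pattern introduced above, a request schema extends BaseSchema with `body` and `query` sub-schemas, and the `validate()` middleware now parses the whole `ctx.request` rather than just `ctx.request.body`, so handlers read `ctx.input.body` / `ctx.input.query`. The sketch below mirrors that outside the router; the `WidgetsRedirectSchema` name, its fields, and the sample values are hypothetical — only the BaseSchema shape and the parse-the-whole-request behaviour come from the diff.

```ts
import { z } from "zod";

// Same shape as the new server/routes/api/BaseSchema.ts
const BaseSchema = z.object({
  body: z.unknown(),
  query: z.unknown(),
});

// Hypothetical endpoint schema: accepts the id in either the body or the query string.
const WidgetsRedirectSchema = BaseSchema.extend({
  body: z.object({ id: z.string().uuid().optional() }),
  query: z.object({ id: z.string().uuid().optional() }),
}).refine((req) => !!(req.body.id || req.query.id), {
  message: "id is required",
});

type WidgetsRedirectReq = z.infer<typeof WidgetsRedirectSchema>;

// This mirrors what validate() now does: it parses the request (body + query)
// as a whole instead of the body alone.
const input: WidgetsRedirectReq = WidgetsRedirectSchema.parse({
  body: {},
  query: { id: "6e6b5f26-5f8b-4c1f-9a39-1d3b6f4f2a10" },
});

const id = input.body.id ?? input.query.id; // resolves from whichever source was supplied
```

In the routes above this parse step runs inside the `validate()` middleware before the handler executes, which is why every handler in the diff switches from `ctx.input` to `ctx.input.body` (or `ctx.input.query`).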