chore: Refactor worker, emails and data cleanup to task system (#3337)

* Refactor worker, all emails on task system

* fix

* lint

* fix: Remove a bunch of expect-error comments in related tests

* refactor: Move work from utils.gc into tasks

* test

* Add tracing to tasks and processors
fix: DebounceProcessor triggering on all events
Event.add -> Event.schedule
This commit is contained in:
Tom Moor
2022-04-06 16:48:28 -07:00
committed by GitHub
parent 9c766362ed
commit dbfdcd6d23
41 changed files with 729 additions and 444 deletions

View File

@@ -4,6 +4,6 @@ export const globalEventQueue = createQueue("globalEvents");
export const processorEventQueue = createQueue("processorEvents");
export const websocketsQueue = createQueue("websockets");
export const websocketQueue = createQueue("websockets");
export const emailsQueue = createQueue("emails");
export const taskQueue = createQueue("tasks");

View File

@@ -1,9 +1,10 @@
import { Backlink } from "@server/models";
import { buildDocument } from "@server/test/factories";
import { flushdb } from "@server/test/support";
import BacklinksService from "./backlinks";
import BacklinksProcessor from "./BacklinksProcessor";
const ip = "127.0.0.1";
const Backlinks = new BacklinksService();
beforeEach(() => flushdb());
beforeEach(jest.resetAllMocks);
@@ -13,13 +14,16 @@ describe("documents.publish", () => {
const document = await buildDocument({
text: `[this is a link](${otherDocument.url})`,
});
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.publish"; doc... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
data: { title: document.title },
ip,
});
const backlinks = await Backlink.findAll({
where: {
@@ -38,13 +42,16 @@ describe("documents.publish", () => {
});
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.publish"; doc... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
data: { title: document.title },
ip,
});
const backlinks = await Backlink.findAll({
where: {
@@ -61,13 +68,17 @@ describe("documents.update", () => {
const document = await buildDocument({
text: `[this is a link](${otherDocument.url})`,
});
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update"; docu... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
const backlinks = await Backlink.findAll({
where: {
@@ -85,13 +96,17 @@ describe("documents.update", () => {
});
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update"; docu... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
const backlinks = await Backlink.findAll({
where: {
@@ -106,13 +121,17 @@ describe("documents.update", () => {
const document = await buildDocument();
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update"; docu... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
const backlinks = await Backlink.findAll({
where: {
@@ -130,25 +149,31 @@ describe("documents.update", () => {
[this is a another link](${yetAnotherDocument.url})`,
});
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.publish"; doc... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
data: { title: document.title },
ip,
});
document.text = `First link is gone
[this is a another link](${yetAnotherDocument.url})`;
await document.save();
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update"; docu... Remove this comment to see the full error message
await Backlinks.on({
await processor.perform({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
const backlinks = await Backlink.findAll({
where: {
@@ -166,21 +191,27 @@ describe("documents.delete", () => {
const document = await buildDocument();
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update"; docu... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.delete"; docu... Remove this comment to see the full error message
await Backlinks.on({
await processor.perform({
name: "documents.delete",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
data: { title: document.title },
ip,
});
const backlinks = await Backlink.findAll({
where: {
@@ -201,29 +232,33 @@ describe("documents.title_change", () => {
document.text = `[${otherDocument.title}](${otherDocument.url})`;
await document.save();
// ensure the backlinks are created
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update"; docu... Remove this comment to see the full error message
await Backlinks.on({
const processor = new BacklinksProcessor();
await processor.perform({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
// change the title of the linked doc
otherDocument.title = newTitle;
await otherDocument.save();
// does the text get updated with the new title
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.title_change"... Remove this comment to see the full error message
await Backlinks.on({
await processor.perform({
name: "documents.title_change",
documentId: otherDocument.id,
collectionId: otherDocument.collectionId,
teamId: otherDocument.teamId,
actorId: otherDocument.createdById,
createdAt: new Date().toISOString(),
data: {
previousTitle,
title: newTitle,
},
ip,
});
await document.reload();
expect(document.text).toBe(`[${newTitle}](${otherDocument.url})`);

View File

@@ -1,11 +1,21 @@
import { Op } from "sequelize";
import { APM } from "@server/logging/tracing";
import { Document, Backlink, Team } from "@server/models";
import { Event, DocumentEvent, RevisionEvent } from "@server/types";
import parseDocumentIds from "@server/utils/parseDocumentIds";
import slugify from "@server/utils/slugify";
import { DocumentEvent, RevisionEvent } from "../../types";
import BaseProcessor from "./BaseProcessor";
export default class BacklinksProcessor {
async on(event: DocumentEvent | RevisionEvent) {
@APM.trace()
export default class BacklinksProcessor extends BaseProcessor {
static applicableEvents: Event["name"][] = [
"documents.publish",
"documents.update",
"documents.title_change",
"documents.delete",
];
async perform(event: DocumentEvent | RevisionEvent) {
switch (event.name) {
case "documents.publish": {
const document = await Document.findByPk(event.documentId);

View File

@@ -0,0 +1,7 @@
import { Event } from "@server/types";
/**
 * Base class for all event processors. A subclass lists the event names it
 * cares about in `applicableEvents` and implements `perform` to handle them.
 */
export default abstract class BaseProcessor {
  // Event names routed to this processor; empty here so each subclass must
  // declare its own list explicitly.
  static applicableEvents: Event["name"][] = [];

  public abstract perform(event: Event): Promise<void>;
}

View File

@@ -1,9 +1,17 @@
import { APM } from "@server/logging/tracing";
import Document from "@server/models/Document";
import { globalEventQueue } from "../../queues";
import { Event } from "../../types";
import { Event } from "@server/types";
import { globalEventQueue } from "..";
import BaseProcessor from "./BaseProcessor";
export default class DebounceProcessor {
async on(event: Event) {
@APM.trace()
export default class DebounceProcessor extends BaseProcessor {
static applicableEvents: Event["name"][] = [
"documents.update",
"documents.update.delayed",
];
async perform(event: Event) {
switch (event.name) {
case "documents.update": {
globalEventQueue.add(

View File

@@ -1,14 +1,20 @@
import fs from "fs";
import invariant from "invariant";
import Logger from "@server/logging/logger";
import mailer from "@server/mailer";
import { FileOperation, Collection, Event, Team, User } from "@server/models";
import EmailTask from "@server/queues/tasks/EmailTask";
import { Event as TEvent } from "@server/types";
import { uploadToS3FromBuffer } from "@server/utils/s3";
import { archiveCollections } from "@server/utils/zip";
import BaseProcessor from "./BaseProcessor";
export default class ExportsProcessor {
async on(event: TEvent) {
export default class ExportsProcessor extends BaseProcessor {
static applicableEvents: TEvent["name"][] = [
"collections.export",
"collections.export_all",
];
async perform(event: TEvent) {
switch (event.name) {
case "collections.export":
case "collections.export_all": {
@@ -82,15 +88,21 @@ export default class ExportsProcessor {
});
if (state === "error") {
mailer.sendTemplate("exportFailure", {
to: user.email,
teamUrl: team.url,
await EmailTask.schedule({
type: "exportFailure",
options: {
to: user.email,
teamUrl: team.url,
},
});
} else {
mailer.sendTemplate("exportSuccess", {
to: user.email,
id: fileOperation.id,
teamUrl: team.url,
await EmailTask.schedule({
type: "exportSuccess",
options: {
to: user.email,
id: fileOperation.id,
teamUrl: team.url,
},
});
}
}
@@ -108,7 +120,7 @@ export default class ExportsProcessor {
data: Partial<FileOperation>
) {
await fileOperation.update(data);
await Event.add({
await Event.schedule({
name: "fileOperations.update",
teamId,
actorId,

View File

@@ -4,10 +4,13 @@ import File from "formidable/lib/file";
import invariant from "invariant";
import collectionImporter from "@server/commands/collectionImporter";
import { Event, FileOperation, Attachment, User } from "@server/models";
import { Event as TEvent } from "../../types";
import { Event as TEvent } from "@server/types";
import BaseProcessor from "./BaseProcessor";
export default class ImportsProcessor {
async on(event: TEvent) {
export default class ImportsProcessor extends BaseProcessor {
static applicableEvents: TEvent["name"][] = ["collections.import"];
async perform(event: TEvent) {
switch (event.name) {
case "collections.import": {
let state, error;
@@ -27,7 +30,7 @@ export default class ImportsProcessor {
teamId: user.teamId,
});
await Event.add({
await Event.schedule({
name: "fileOperations.create",
modelId: fileOperation.id,
teamId: user.teamId,
@@ -59,7 +62,7 @@ export default class ImportsProcessor {
error = err.message;
} finally {
await fileOperation.update({ state, error });
await Event.add({
await Event.schedule({
name: "fileOperations.update",
modelId: fileOperation.id,
teamId: user.teamId,

View File

@@ -1,16 +1,16 @@
import mailer from "@server/mailer";
import { View, NotificationSetting } from "@server/models";
import EmailTask from "@server/queues/tasks/EmailTask";
import {
buildDocument,
buildCollection,
buildUser,
} from "@server/test/factories";
import { flushdb } from "@server/test/support";
import NotificationsService from "./notifications";
import NotificationsProcessor from "./NotificationsProcessor";
jest.mock("@server/mailer");
jest.mock("@server/queues/tasks/EmailTask");
const ip = "127.0.0.1";
const Notifications = new NotificationsService();
beforeEach(() => flushdb());
beforeEach(jest.resetAllMocks);
@@ -26,18 +26,20 @@ describe("documents.publish", () => {
teamId: user.teamId,
event: "documents.publish",
});
await Notifications.on({
const processor = new NotificationsProcessor();
await processor.perform({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
ip: "127.0.0.1",
data: {
title: document.title,
},
ip,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
expect(EmailTask.schedule).not.toHaveBeenCalled();
});
test("should send a notification to other users in team", async () => {
@@ -51,18 +53,19 @@ describe("documents.publish", () => {
event: "documents.publish",
});
await Notifications.on({
const processor = new NotificationsProcessor();
await processor.perform({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
ip: "127.0.0.1",
data: {
title: document.title,
},
ip,
});
expect(mailer.documentNotification).toHaveBeenCalled();
expect(EmailTask.schedule).toHaveBeenCalled();
});
test("should not send a notification to users without collection access", async () => {
@@ -80,18 +83,19 @@ describe("documents.publish", () => {
teamId: user.teamId,
event: "documents.publish",
});
await Notifications.on({
const processor = new NotificationsProcessor();
await processor.perform({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
ip: "127.0.0.1",
data: {
title: document.title,
},
ip,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
expect(EmailTask.schedule).not.toHaveBeenCalled();
});
});
@@ -108,13 +112,14 @@ describe("revisions.create", () => {
teamId: collaborator.teamId,
event: "documents.update",
});
await Notifications.on({
const processor = new NotificationsProcessor();
await processor.perform({
name: "revisions.create",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
});
expect(mailer.documentNotification).toHaveBeenCalled();
expect(EmailTask.schedule).toHaveBeenCalled();
});
test("should not send a notification if viewed since update", async () => {
@@ -130,13 +135,15 @@ describe("revisions.create", () => {
event: "documents.update",
});
await View.touch(document.id, collaborator.id, true);
await Notifications.on({
const processor = new NotificationsProcessor();
await processor.perform({
name: "revisions.create",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
expect(EmailTask.schedule).not.toHaveBeenCalled();
});
test("should not send a notification to last editor", async () => {
@@ -150,12 +157,13 @@ describe("revisions.create", () => {
teamId: user.teamId,
event: "documents.update",
});
await Notifications.on({
const processor = new NotificationsProcessor();
await processor.perform({
name: "revisions.create",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
expect(EmailTask.schedule).not.toHaveBeenCalled();
});
});

View File

@@ -1,6 +1,6 @@
import { Op } from "sequelize";
import Logger from "@server/logging/logger";
import mailer from "@server/mailer";
import { APM } from "@server/logging/tracing";
import {
View,
Document,
@@ -15,9 +15,18 @@ import {
RevisionEvent,
Event,
} from "@server/types";
import EmailTask from "../tasks/EmailTask";
import BaseProcessor from "./BaseProcessor";
export default class NotificationsProcessor {
async on(event: Event) {
@APM.trace()
export default class NotificationsProcessor extends BaseProcessor {
static applicableEvents: Event["name"][] = [
"documents.publish",
"revisions.create",
"collections.create",
];
async perform(event: Event) {
switch (event.name) {
case "documents.publish":
case "revisions.create":
@@ -114,14 +123,17 @@ export default class NotificationsProcessor {
continue;
}
mailer.documentNotification({
to: setting.user.email,
eventName,
document,
team,
collection,
actor: document.updatedBy,
unsubscribeUrl: setting.unsubscribeUrl,
await EmailTask.schedule({
type: "documentNotification",
options: {
to: setting.user.email,
eventName,
documentId: document.id,
teamUrl: team.url,
actorName: document.updatedBy.name,
collectionName: collection.name,
unsubscribeUrl: setting.unsubscribeUrl,
},
});
}
}
@@ -165,12 +177,14 @@ export default class NotificationsProcessor {
continue;
}
mailer.collectionNotification({
to: setting.user.email,
eventName: "created",
collection,
actor: collection.user,
unsubscribeUrl: setting.unsubscribeUrl,
await EmailTask.schedule({
type: "collectionNotification",
options: {
to: setting.user.email,
eventName: "created",
collectionId: collection.id,
unsubscribeUrl: setting.unsubscribeUrl,
},
});
}
}

View File

@@ -1,22 +1,27 @@
import { Revision } from "@server/models";
import { buildDocument } from "@server/test/factories";
import { flushdb } from "@server/test/support";
import RevisionsService from "./revisions";
import RevisionsProcessor from "./RevisionsProcessor";
const ip = "127.0.0.1";
const Revisions = new RevisionsService();
beforeEach(() => flushdb());
beforeEach(jest.resetAllMocks);
describe("documents.update.debounced", () => {
test("should create a revision", async () => {
const document = await buildDocument();
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update.deboun... Remove this comment to see the full error message
await Revisions.on({
const processor = new RevisionsProcessor();
await processor.perform({
name: "documents.update.debounced",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
const amount = await Revision.count({
where: {
@@ -29,13 +34,17 @@ describe("documents.update.debounced", () => {
test("should not create a revision if identical to previous", async () => {
const document = await buildDocument();
await Revision.createFromDocument(document);
// @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ name: "documents.update.deboun... Remove this comment to see the full error message
await Revisions.on({
const processor = new RevisionsProcessor();
await processor.perform({
name: "documents.update.debounced",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
createdAt: new Date().toISOString(),
data: { title: document.title, autosave: false, done: true },
ip,
});
const amount = await Revision.count({
where: {

View File

@@ -1,10 +1,15 @@
import invariant from "invariant";
import revisionCreator from "@server/commands/revisionCreator";
import { APM } from "@server/logging/tracing";
import { Revision, Document, User } from "@server/models";
import { DocumentEvent, RevisionEvent } from "../../types";
import { DocumentEvent, RevisionEvent, Event } from "@server/types";
import BaseProcessor from "./BaseProcessor";
export default class RevisionsProcessor {
async on(event: DocumentEvent | RevisionEvent) {
@APM.trace()
export default class RevisionsProcessor extends BaseProcessor {
static applicableEvents: Event["name"][] = ["documents.update.debounced"];
async perform(event: DocumentEvent | RevisionEvent) {
switch (event.name) {
case "documents.update.debounced": {
const document = await Document.findByPk(event.documentId);

View File

@@ -1,5 +1,6 @@
import fetch from "fetch-with-proxy";
import { Op } from "sequelize";
import { APM } from "@server/logging/tracing";
import { Document, Integration, Collection, Team } from "@server/models";
import { presentSlackAttachment } from "@server/presenters";
import {
@@ -7,10 +8,18 @@ import {
IntegrationEvent,
RevisionEvent,
Event,
} from "../../types";
} from "@server/types";
import BaseProcessor from "./BaseProcessor";
export default class SlackProcessor {
async on(event: Event) {
@APM.trace()
export default class SlackProcessor extends BaseProcessor {
static applicableEvents: Event["name"][] = [
"documents.publish",
"revisions.create",
"integrations.create",
];
async perform(event: Event) {
switch (event.name) {
case "documents.publish":
case "revisions.create":

View File

@@ -1,5 +1,7 @@
import { subHours } from "date-fns";
import { Op } from "sequelize";
import { Server } from "socket.io";
import { APM } from "@server/logging/tracing";
import {
Document,
Collection,
@@ -17,8 +19,9 @@ import {
} from "@server/presenters";
import { Event } from "../../types";
@APM.trace()
export default class WebsocketsProcessor {
async on(event: Event, socketio: any) {
async perform(event: Event, socketio: Server) {
switch (event.name) {
case "documents.publish":
case "documents.restore":
@@ -604,6 +607,7 @@ export default class WebsocketsProcessor {
}
default:
return;
}
}
}

View File

@@ -1,12 +0,0 @@
import mailer, { EmailSendOptions, EmailTypes } from "../../mailer";
type EmailEvent = {
type: EmailTypes;
opts: EmailSendOptions;
};
export default class EmailsProcessor {
async on(event: EmailEvent) {
await mailer[event.type](event.opts);
}
}

View File

@@ -0,0 +1,16 @@
import { requireDirectory } from "@server/utils/fs";

// Registry of all processors in this directory, keyed by module file name.
// Populated at require-time by loading every sibling module's default export.
const processors = {};

requireDirectory(__dirname).forEach(([module, id]) => {
  // @ts-expect-error ts-migrate(2339) FIXME: Property 'default' does not exist on type 'unknown'
  const { default: Processor } = module;

  // Skip this index file itself when scanning the directory.
  if (id === "index") {
    return;
  }

  processors[id] = Processor;
});

export default processors;

View File

@@ -0,0 +1,36 @@
import { JobOptions } from "bull";
import { taskQueue } from "../";
/**
 * Job priorities for the task queue; lower numbers are processed first.
 */
export enum TaskPriority {
  Background = 40,
  Low = 30,
  Normal = 20,
  High = 10,
}

/**
 * Abstract base class for asynchronous background tasks. Calling the static
 * `schedule` method enqueues the given props on the shared task queue; a
 * worker later instantiates the subclass and invokes `perform` with them.
 */
export default abstract class BaseTask<T> {
  /**
   * Enqueue this task with the given props. `this.name` resolves to the
   * subclass's class name, which identifies the task to the worker. An
   * instance is created here only to read its `options` getter.
   */
  public static schedule<T>(props: T) {
    // @ts-expect-error cannot create an instance of an abstract class, we wont
    const task = new this();
    return taskQueue.add(
      {
        name: this.name,
        props,
      },
      task.options
    );
  }

  /** Execute the task; subclasses implement the actual work here. */
  public abstract perform(props: T): Promise<void>;

  /**
   * Queue job options for this task. Subclasses may override to change
   * priority, retry attempts, or backoff behavior.
   */
  public get options(): JobOptions {
    return {
      priority: TaskPriority.Normal,
      attempts: 5,
      backoff: {
        type: "exponential",
        delay: 60 * 1000,
      },
    };
  }
}

View File

@@ -0,0 +1,72 @@
import { subDays } from "date-fns";
import { Document } from "@server/models";
import { buildDocument } from "@server/test/factories";
import { flushdb } from "@server/test/support";
import CleanupDeletedDocumentsTask from "./CleanupDeletedDocumentsTask";

// Each test starts from an empty database.
beforeEach(() => flushdb());

describe("CleanupDeletedDocumentsTask", () => {
  it("should not destroy documents not deleted", async () => {
    // A live (never soft-deleted) document must survive the cleanup.
    await buildDocument({
      publishedAt: new Date(),
    });
    const task = new CleanupDeletedDocumentsTask();
    await task.perform({ limit: 100 });

    // Count unscoped and non-paranoid so a permanently destroyed row would
    // show up as missing.
    expect(
      await Document.unscoped().count({
        paranoid: false,
      })
    ).toEqual(1);
  });

  it("should not destroy documents deleted less than 30 days ago", async () => {
    // Soft-deleted 25 days ago — inside the 30-day retention window.
    await buildDocument({
      publishedAt: new Date(),
      deletedAt: subDays(new Date(), 25),
    });
    const task = new CleanupDeletedDocumentsTask();
    await task.perform({ limit: 100 });

    expect(
      await Document.unscoped().count({
        paranoid: false,
      })
    ).toEqual(1);
  });

  it("should destroy documents deleted more than 30 days ago", async () => {
    // Soft-deleted 60 days ago — past the retention window, must be purged.
    await buildDocument({
      publishedAt: new Date(),
      deletedAt: subDays(new Date(), 60),
    });
    const task = new CleanupDeletedDocumentsTask();
    await task.perform({ limit: 100 });

    expect(
      await Document.unscoped().count({
        paranoid: false,
      })
    ).toEqual(0);
  });

  it("should destroy draft documents deleted more than 30 days ago", async () => {
    // Drafts (no publishedAt) are purged too — the task queries the
    // "withDrafts" scope.
    await buildDocument({
      publishedAt: undefined,
      deletedAt: subDays(new Date(), 60),
    });
    const task = new CleanupDeletedDocumentsTask();
    await task.perform({ limit: 100 });

    expect(
      await Document.unscoped().count({
        paranoid: false,
      })
    ).toEqual(0);
  });
});

View File

@@ -0,0 +1,40 @@
import { subDays } from "date-fns";
import { Op } from "sequelize";
import documentPermanentDeleter from "@server/commands/documentPermanentDeleter";
import Logger from "@server/logging/logger";
import { APM } from "@server/logging/tracing";
import { Document } from "@server/models";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {
  /** Maximum number of documents to destroy in one run */
  limit: number;
};

/**
 * Permanently destroys documents that were soft-deleted more than 30 days
 * ago, processing at most `limit` documents per run.
 */
@APM.trace()
export default class CleanupDeletedDocumentsTask extends BaseTask<Props> {
  public async perform({ limit }: Props) {
    Logger.info(
      "task",
      // Fixed typo: "upto" → "up to"
      `Permanently destroying up to ${limit} documents older than 30 days…`
    );

    // paranoid: false makes the query see soft-deleted rows, and the
    // "withDrafts" scope includes unpublished documents as well.
    const documents = await Document.scope("withDrafts").findAll({
      attributes: ["id", "teamId", "text", "deletedAt"],
      where: {
        deletedAt: {
          [Op.lt]: subDays(new Date(), 30),
        },
      },
      paranoid: false,
      limit,
    });
    const countDeletedDocument = await documentPermanentDeleter(documents);

    Logger.info("task", `Destroyed ${countDeletedDocument} documents`);
  }

  public get options() {
    // Best-effort cleanup that runs regularly: no retries, and keep it out
    // of the way of user-facing work.
    return {
      attempts: 1,
      priority: TaskPriority.Background,
    };
  }
}

View File

@@ -0,0 +1,42 @@
import { subDays } from "date-fns";
import { Op } from "sequelize";
import teamPermanentDeleter from "@server/commands/teamPermanentDeleter";
import Logger from "@server/logging/logger";
import { APM } from "@server/logging/tracing";
import { Team } from "@server/models";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {
  /** Maximum number of teams to destroy in one run */
  limit: number;
};

/**
 * Permanently destroys teams that were soft-deleted more than 30 days ago,
 * processing at most `limit` teams per run.
 */
@APM.trace()
export default class CleanupDeletedTeamsTask extends BaseTask<Props> {
  public async perform({ limit }: Props) {
    Logger.info(
      "task",
      // Fixed typo: "upto" → "up to"
      `Permanently destroying up to ${limit} teams older than 30 days…`
    );

    // paranoid: false makes the query see soft-deleted rows.
    const teams = await Team.findAll({
      where: {
        deletedAt: {
          [Op.lt]: subDays(new Date(), 30),
        },
      },
      paranoid: false,
      limit,
    });

    // Teams are deleted one at a time; teamPermanentDeleter presumably
    // touches many related rows, so avoid running deletions in parallel.
    for (const team of teams) {
      await teamPermanentDeleter(team);
    }

    Logger.info("task", `Destroyed ${teams.length} teams`);
  }

  public get options() {
    // Best-effort cleanup: no retries, lowest priority.
    return {
      attempts: 1,
      priority: TaskPriority.Background,
    };
  }
}

View File

@@ -0,0 +1,56 @@
import { subDays } from "date-fns";
import { FileOperation } from "@server/models";
import { buildFileOperation } from "@server/test/factories";
import { flushdb } from "@server/test/support";
import CleanupExpiredFileOperationsTask from "./CleanupExpiredFileOperationsTask";

// Each test starts from an empty database.
beforeEach(() => flushdb());

describe("CleanupExpiredFileOperationsTask", () => {
  it("should expire exports older than 30 days ago", async () => {
    // One export created 30 days ago (eligible) and one fresh (not eligible).
    await buildFileOperation({
      type: "export",
      state: "complete",
      createdAt: subDays(new Date(), 30),
    });
    await buildFileOperation({
      type: "export",
      state: "complete",
    });

    // Run the task; only the operation created 30+ days ago should be
    // marked as expired.
    const task = new CleanupExpiredFileOperationsTask();
    await task.perform({ limit: 100 });

    const data = await FileOperation.count({
      where: {
        type: "export",
        state: "expired",
      },
    });
    expect(data).toEqual(1);
  });

  it("should not expire exports made less than 30 days ago", async () => {
    // Both exports are within the 30-day window — nothing should expire.
    await buildFileOperation({
      type: "export",
      state: "complete",
      createdAt: subDays(new Date(), 29),
    });
    await buildFileOperation({
      type: "export",
      state: "complete",
    });

    const task = new CleanupExpiredFileOperationsTask();
    await task.perform({ limit: 100 });

    const data = await FileOperation.count({
      where: {
        type: "export",
        state: "expired",
      },
    });
    expect(data).toEqual(0);
  });
});

View File

@@ -0,0 +1,40 @@
import { subDays } from "date-fns";
import { Op } from "sequelize";
import Logger from "@server/logging/logger";
import { APM } from "@server/logging/tracing";
import { FileOperation } from "@server/models";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {
  /** Maximum number of file operations to expire in one run */
  limit: number;
};

/**
 * Marks export file operations created more than 30 days ago as expired,
 * processing at most `limit` operations per run.
 */
@APM.trace()
export default class CleanupExpiredFileOperationsTask extends BaseTask<Props> {
  public async perform({ limit }: Props) {
    Logger.info("task", `Expiring export file operations older than 30 days…`);

    const cutoff = subDays(new Date(), 30);

    // Unscoped so default model scopes cannot hide eligible rows; skip
    // anything already expired.
    const fileOperations = await FileOperation.unscoped().findAll({
      where: {
        type: "export",
        createdAt: {
          [Op.lt]: cutoff,
        },
        state: {
          [Op.ne]: "expired",
        },
      },
      limit,
    });

    // Expire all matched operations concurrently.
    await Promise.all(fileOperations.map((operation) => operation.expire()));

    Logger.info("task", `Expired ${fileOperations.length} file operations`);
  }

  public get options() {
    // Best-effort cleanup: no retries, lowest priority.
    return {
      attempts: 1,
      priority: TaskPriority.Background,
    };
  }
}

View File

@@ -0,0 +1,15 @@
import { APM } from "@server/logging/tracing";
import mailer, { EmailSendOptions, EmailTypes } from "../../mailer";
import BaseTask from "./BaseTask";
type Props = {
  /** Name of the mailer template method to invoke */
  type: EmailTypes;
  /** Options forwarded to the mailer method (recipient, template data, …) */
  options: EmailSendOptions;
};

/**
 * Task that sends a single email by dispatching to the mailer method named
 * in its props.
 */
@APM.trace()
export default class EmailTask extends BaseTask<Props> {
  public async perform({ type, options }: Props) {
    // Look up the mailer method by name and invoke it with the stored options.
    await mailer[type](options);
  }
}

View File

@@ -0,0 +1,16 @@
import { requireDirectory } from "@server/utils/fs";

// Registry of all tasks in this directory, keyed by module file name.
// Populated at require-time by loading every sibling module's default export.
const tasks = {};

requireDirectory(__dirname).forEach(([module, id]) => {
  // @ts-expect-error ts-migrate(2339) FIXME: Property 'default' does not exist on type 'unknown'
  const { default: Task } = module;

  // Skip this index file itself when scanning the directory.
  if (id === "index") {
    return;
  }

  tasks[id] = Task;
});

export default tasks;