chore: Refactoring event processors and service architecture (#2495)

This commit is contained in:
Tom Moor
2021-08-27 21:42:13 -07:00
committed by GitHub
parent 86f008293a
commit 28aef82af9
61 changed files with 1610 additions and 1498 deletions

server/queues/index.js Normal file

@@ -0,0 +1,7 @@
// @flow
import { createQueue } from "../utils/queue";
export const globalEventQueue = createQueue("globalEvents");
export const processorEventQueue = createQueue("processorEvents");
export const websocketsQueue = createQueue("websockets");
export const emailsQueue = createQueue("emails");
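
For reference, a minimal sketch of what the createQueue helper might look like if it wraps Bull; this is an assumption, since server/utils/queue is not part of this diff:

// @flow
import Queue from "bull";

export function createQueue(name: string) {
  // one Redis-backed queue per name; assumes REDIS_URL is configured
  return new Queue(name, process.env.REDIS_URL);
}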


@@ -0,0 +1,132 @@
// @flow
import { Document, Backlink } from "../../models";
import { Op } from "../../sequelize";
import type { DocumentEvent, RevisionEvent } from "../../types";
import parseDocumentIds from "../../utils/parseDocumentIds";
import slugify from "../../utils/slugify";
export default class BacklinksProcessor {
async on(event: DocumentEvent | RevisionEvent) {
switch (event.name) {
case "documents.publish": {
const document = await Document.findByPk(event.documentId);
if (!document) return;
const linkIds = parseDocumentIds(document.text);
await Promise.all(
linkIds.map(async (linkId) => {
const linkedDocument = await Document.findByPk(linkId);
if (!linkedDocument || linkedDocument.id === event.documentId) {
return;
}
await Backlink.findOrCreate({
where: {
documentId: linkedDocument.id,
reverseDocumentId: event.documentId,
},
defaults: {
userId: document.lastModifiedById,
},
});
})
);
break;
}
case "documents.update": {
const document = await Document.findByPk(event.documentId);
if (!document) return;
// backlinks are only created for published documents
if (!document.publishedAt) return;
const linkIds = parseDocumentIds(document.text);
const linkedDocumentIds = [];
// create or find existing backlink records for referenced docs
await Promise.all(
linkIds.map(async (linkId) => {
const linkedDocument = await Document.findByPk(linkId);
if (!linkedDocument || linkedDocument.id === event.documentId) {
return;
}
await Backlink.findOrCreate({
where: {
documentId: linkedDocument.id,
reverseDocumentId: event.documentId,
},
defaults: {
userId: document.lastModifiedById,
},
});
linkedDocumentIds.push(linkedDocument.id);
})
);
// delete any backlinks that no longer exist
await Backlink.destroy({
where: {
documentId: {
[Op.notIn]: linkedDocumentIds,
},
reverseDocumentId: event.documentId,
},
});
break;
}
case "documents.title_change": {
const document = await Document.findByPk(event.documentId);
if (!document) return;
// skip if the title didn't actually change
const { title, previousTitle } = event.data;
if (!previousTitle || title === previousTitle) break;
// update any link titles in documents that lead to this one
const backlinks = await Backlink.findAll({
where: {
documentId: event.documentId,
},
include: [{ model: Document, as: "reverseDocument" }],
});
await Promise.all(
backlinks.map(async (backlink) => {
const previousUrl = `/doc/${slugify(previousTitle)}-${
document.urlId
}`;
// find links in the other document that lead to this one and have
// the old title as anchor text. Go ahead and update those to the
// new title automatically
backlink.reverseDocument.text = backlink.reverseDocument.text.replace(
`[${previousTitle}](${previousUrl})`,
`[${title}](${document.url})`
);
await backlink.reverseDocument.save({
silent: true,
hooks: false,
});
})
);
break;
}
case "documents.delete": {
await Backlink.destroy({
where: {
[Op.or]: [
{ reverseDocumentId: event.documentId },
{ documentId: event.documentId },
],
},
});
break;
}
default:
}
}
}
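
A processor like this is presumably driven by a worker consuming the processorEventQueue defined above. A hedged sketch of that wiring, reusing the import paths seen elsewhere in this diff; the actual worker is not shown here:

// @flow
import { processorEventQueue } from "../../queues";
import BacklinksProcessor from "./backlinks";

const processors = [new BacklinksProcessor()];

processorEventQueue.process(async (job) => {
  // fan each queued event out to every registered processor
  await Promise.all(processors.map((processor) => processor.on(job.data)));
});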


@@ -0,0 +1,242 @@
/* eslint-disable flowtype/require-valid-file-annotation */
import { Backlink } from "../../models";
import { buildDocument } from "../../test/factories";
import { flushdb } from "../../test/support";
import BacklinksService from "./backlinks";
const Backlinks = new BacklinksService();
beforeEach(() => flushdb());
beforeEach(jest.resetAllMocks);
describe("documents.publish", () => {
test("should create new backlink records", async () => {
const otherDocument = await buildDocument();
const document = await buildDocument({
text: `[this is a link](${otherDocument.url})`,
});
await Backlinks.on({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const backlinks = await Backlink.findAll({
where: { reverseDocumentId: document.id },
});
expect(backlinks.length).toBe(1);
});
test("should not fail when linked document is destroyed", async () => {
const otherDocument = await buildDocument();
await otherDocument.destroy();
const document = await buildDocument({
version: null,
text: `[ ] checklist item`,
});
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
await Backlinks.on({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const backlinks = await Backlink.findAll({
where: { reverseDocumentId: document.id },
});
expect(backlinks.length).toBe(0);
});
});
describe("documents.update", () => {
test("should not fail on a document with no previous revisions", async () => {
const otherDocument = await buildDocument();
const document = await buildDocument({
text: `[this is a link](${otherDocument.url})`,
});
await Backlinks.on({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const backlinks = await Backlink.findAll({
where: { reverseDocumentId: document.id },
});
expect(backlinks.length).toBe(1);
});
test("should not fail when previous revision is different document version", async () => {
const otherDocument = await buildDocument();
const document = await buildDocument({
version: null,
text: `[ ] checklist item`,
});
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
await Backlinks.on({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const backlinks = await Backlink.findAll({
where: { reverseDocumentId: document.id },
});
expect(backlinks.length).toBe(1);
});
test("should create new backlink records", async () => {
const otherDocument = await buildDocument();
const document = await buildDocument();
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
await Backlinks.on({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const backlinks = await Backlink.findAll({
where: { reverseDocumentId: document.id },
});
expect(backlinks.length).toBe(1);
});
test("should destroy removed backlink records", async () => {
const otherDocument = await buildDocument();
const yetAnotherDocument = await buildDocument();
const document = await buildDocument({
text: `[this is a link](${otherDocument.url})
[this is another link](${yetAnotherDocument.url})`,
});
await Backlinks.on({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
document.text = `First link is gone
[this is another link](${yetAnotherDocument.url})`;
await document.save();
await Backlinks.on({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const backlinks = await Backlink.findAll({
where: { reverseDocumentId: document.id },
});
expect(backlinks.length).toBe(1);
expect(backlinks[0].documentId).toBe(yetAnotherDocument.id);
});
});
describe("documents.delete", () => {
test("should destroy related backlinks", async () => {
const otherDocument = await buildDocument();
const document = await buildDocument();
document.text = `[this is a link](${otherDocument.url})`;
await document.save();
await Backlinks.on({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
await Backlinks.on({
name: "documents.delete",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const backlinks = await Backlink.findAll({
where: { reverseDocumentId: document.id },
});
expect(backlinks.length).toBe(0);
});
});
describe("documents.title_change", () => {
test("should update titles in backlinked documents", async () => {
const newTitle = "test";
const document = await buildDocument();
const otherDocument = await buildDocument();
const previousTitle = otherDocument.title;
// create a doc with a link back
document.text = `[${otherDocument.title}](${otherDocument.url})`;
await document.save();
// ensure the backlinks are created
await Backlinks.on({
name: "documents.update",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
// change the title of the linked doc
otherDocument.title = newTitle;
await otherDocument.save();
// does the text get updated with the new title
await Backlinks.on({
name: "documents.title_change",
documentId: otherDocument.id,
collectionId: otherDocument.collectionId,
teamId: otherDocument.teamId,
actorId: otherDocument.createdById,
data: {
previousTitle,
title: newTitle,
},
});
await document.reload();
expect(document.text).toBe(`[${newTitle}](${otherDocument.url})`);
});
});


@@ -0,0 +1,45 @@
// @flow
import { Document } from "../../models";
import { globalEventQueue } from "../../queues";
import type { Event } from "../../types";
export default class DebounceProcessor {
async on(event: Event) {
switch (event.name) {
case "documents.update": {
globalEventQueue.add(
{
...event,
name: "documents.update.delayed",
},
{
delay: 5 * 60 * 1000,
removeOnComplete: true,
}
);
break;
}
case "documents.update.delayed": {
const document = await Document.findByPk(event.documentId);
// If the document has been deleted then prevent further processing
if (!document) return;
// If the document has been updated since we initially queued the delayed
// event then abort; there must be a newer update event in the queue.
// This functions as a simple distributed debounce.
if (document.updatedAt > new Date(event.createdAt)) return;
globalEventQueue.add(
{
...event,
name: "documents.update.debounced",
},
{ removeOnComplete: true }
);
break;
}
default:
}
}
}
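
To make the debounce concrete, a hedged usage sketch; the queue and event names come from this diff, while the document id is illustrative:

// @flow
import { globalEventQueue } from "../../queues";

// Several edits in quick succession each enqueue a "documents.update"
// event. Each one schedules a delayed copy five minutes out; only a copy
// whose createdAt is at least as new as the document's updatedAt passes
// the check above and is re-emitted as "documents.update.debounced".
globalEventQueue.add({
  name: "documents.update",
  documentId: "a-document-id", // hypothetical
  createdAt: new Date().toISOString(),
});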


@@ -0,0 +1,14 @@
// @flow
import mailer, { type EmailSendOptions, type EmailTypes } from "../../mailer";
type EmailEvent = {
type: EmailTypes,
opts: EmailSendOptions,
};
export default class EmailsProcessor {
async on(event: EmailEvent) {
// $FlowIssue flow rightly doesn't like dynamic values
await mailer[event.type](event.opts);
}
}
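
The matching producer presumably enqueues jobs shaped like EmailEvent onto the emailsQueue from server/queues/index.js. A hedged sketch with illustrative values; documentNotification is a mailer method referenced elsewhere in this diff, but the options here are simplified:

// @flow
import { emailsQueue } from "../../queues";

emailsQueue.add({
  type: "documentNotification",
  opts: {
    to: "user@example.com", // illustrative recipient
    eventName: "published",
  },
});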


@@ -0,0 +1,42 @@
// @flow
import fs from "fs";
import os from "os";
import File from "formidable/lib/file";
import collectionImporter from "../../commands/collectionImporter";
import { Attachment, User } from "../../models";
import type { Event } from "../../types";
export default class ImportsProcessor {
async on(event: Event) {
switch (event.name) {
case "collections.import": {
const { type } = event.data;
const attachment = await Attachment.findByPk(event.modelId);
const user = await User.findByPk(event.actorId);
const buffer = await attachment.buffer;
const tmpDir = os.tmpdir();
const tmpFilePath = `${tmpDir}/upload-${event.modelId}`;
await fs.promises.writeFile(tmpFilePath, buffer);
const file = new File({
name: attachment.name,
type: attachment.type,
path: tmpFilePath,
});
await collectionImporter({
file,
user,
type,
ip: event.ip,
});
await attachment.destroy();
return;
}
default:
}
}
}
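
For orientation, a hedged sketch of the producer side of this import flow; the event shape follows the fields read in the case handler above, while the ids and the "outline" type are illustrative:

// @flow
import { globalEventQueue } from "../../queues";

globalEventQueue.add({
  name: "collections.import",
  modelId: "an-attachment-id", // hypothetical id of the uploaded Attachment
  actorId: "a-user-id", // hypothetical id of the importing User
  data: { type: "outline" }, // assumed import type
  ip: "127.0.0.1",
});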


@@ -0,0 +1,164 @@
// @flow
import debug from "debug";
import mailer from "../../mailer";
import {
View,
Document,
Team,
Collection,
User,
NotificationSetting,
} from "../../models";
import { Op } from "../../sequelize";
import type { DocumentEvent, CollectionEvent, Event } from "../../types";
const log = debug("services");
export default class NotificationsProcessor {
async on(event: Event) {
switch (event.name) {
case "documents.publish":
case "documents.update.debounced":
return this.documentUpdated(event);
case "collections.create":
return this.collectionCreated(event);
default:
}
}
async documentUpdated(event: DocumentEvent) {
// never send notifications when batch importing documents
if (event.data && event.data.source === "import") return;
const document = await Document.findByPk(event.documentId);
if (!document) return;
const { collection } = document;
if (!collection) return;
const team = await Team.findByPk(document.teamId);
if (!team) return;
const notificationSettings = await NotificationSetting.findAll({
where: {
userId: {
[Op.ne]: document.lastModifiedById,
},
teamId: document.teamId,
event:
event.name === "documents.publish"
? "documents.publish"
: "documents.update",
},
include: [
{
model: User,
required: true,
as: "user",
},
],
});
const eventName =
event.name === "documents.publish" ? "published" : "updated";
for (const setting of notificationSettings) {
// Suppress notifications for suspended users
if (setting.user.isSuspended) {
continue;
}
// For document updates we only want to send notifications if
// the document has been edited by the user with this notification setting.
// This could be replaced with the ability to "follow" in the future.
if (
eventName === "updated" &&
!document.collaboratorIds.includes(setting.userId)
) {
continue;
}
// Check the user has access to the collection this document is in. Just
// because they were a collaborator once doesn't mean they still are.
const collectionIds = await setting.user.collectionIds();
if (!collectionIds.includes(document.collectionId)) {
continue;
}
// If this user has viewed the document since the last update was made
// then we can avoid sending them a useless notification, yay.
const view = await View.findOne({
where: {
userId: setting.userId,
documentId: event.documentId,
updatedAt: {
[Op.gt]: document.updatedAt,
},
},
});
if (view) {
log(
`suppressing notification to ${setting.userId} because update viewed`
);
continue;
}
mailer.documentNotification({
to: setting.user.email,
eventName,
document,
team,
collection,
actor: document.updatedBy,
unsubscribeUrl: setting.unsubscribeUrl,
});
}
}
async collectionCreated(event: CollectionEvent) {
const collection = await Collection.findByPk(event.collectionId, {
include: [
{
model: User,
required: true,
as: "user",
},
],
});
if (!collection) return;
if (!collection.permission) return;
const notificationSettings = await NotificationSetting.findAll({
where: {
userId: {
[Op.ne]: collection.createdById,
},
teamId: collection.teamId,
event: event.name,
},
include: [
{
model: User,
required: true,
as: "user",
},
],
});
for (const setting of notificationSettings) {
// Suppress notifications for suspended users
if (setting.user.isSuspended) {
continue;
}
mailer.collectionNotification({
to: setting.user.email,
eventName: "created",
collection,
actor: collection.user,
unsubscribeUrl: setting.unsubscribeUrl,
});
}
}
}


@@ -0,0 +1,168 @@
/* eslint-disable flowtype/require-valid-file-annotation */
import mailer from "../../mailer";
import { View, NotificationSetting } from "../../models";
import {
buildDocument,
buildCollection,
buildUser,
} from "../../test/factories";
import { flushdb } from "../../test/support";
import NotificationsService from "./notifications";
jest.mock("../../mailer");
const Notifications = new NotificationsService();
beforeEach(() => flushdb());
beforeEach(jest.resetAllMocks);
describe("documents.publish", () => {
test("should not send a notification to author", async () => {
const user = await buildUser();
const document = await buildDocument({
teamId: user.teamId,
lastModifiedById: user.id,
});
await NotificationSetting.create({
userId: user.id,
teamId: user.teamId,
event: "documents.publish",
});
await Notifications.on({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
});
test("should send a notification to other users in team", async () => {
const user = await buildUser();
const document = await buildDocument({
teamId: user.teamId,
});
await NotificationSetting.create({
userId: user.id,
teamId: user.teamId,
event: "documents.publish",
});
await Notifications.on({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
expect(mailer.documentNotification).toHaveBeenCalled();
});
test("should not send a notification to users without collection access", async () => {
const user = await buildUser();
const collection = await buildCollection({
teamId: user.teamId,
permission: null,
});
const document = await buildDocument({
teamId: user.teamId,
collectionId: collection.id,
});
await NotificationSetting.create({
userId: user.id,
teamId: user.teamId,
event: "documents.publish",
});
await Notifications.on({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
});
});
describe("documents.update.debounced", () => {
test("should send a notification to other collaborator", async () => {
const document = await buildDocument();
const collaborator = await buildUser({ teamId: document.teamId });
document.collaboratorIds = [collaborator.id];
await document.save();
await NotificationSetting.create({
userId: collaborator.id,
teamId: collaborator.teamId,
event: "documents.update",
});
await Notifications.on({
name: "documents.update.debounced",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
expect(mailer.documentNotification).toHaveBeenCalled();
});
test("should not send a notification if viewed since update", async () => {
const document = await buildDocument();
const collaborator = await buildUser({ teamId: document.teamId });
document.collaboratorIds = [collaborator.id];
await document.save();
await NotificationSetting.create({
userId: collaborator.id,
teamId: collaborator.teamId,
event: "documents.update",
});
await View.touch(document.id, collaborator.id, true);
await Notifications.on({
name: "documents.update.debounced",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
});
test("should not send a notification to last editor", async () => {
const user = await buildUser();
const document = await buildDocument({
teamId: user.teamId,
lastModifiedById: user.id,
});
await NotificationSetting.create({
userId: user.id,
teamId: user.teamId,
event: "documents.update",
});
await Notifications.on({
name: "documents.update.debounced",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
expect(mailer.documentNotification).not.toHaveBeenCalled();
});
});


@@ -0,0 +1,37 @@
// @flow
import invariant from "invariant";
import revisionCreator from "../../commands/revisionCreator";
import { Revision, Document, User } from "../../models";
import type { DocumentEvent, RevisionEvent } from "../../types";
export default class RevisionsProcessor {
async on(event: DocumentEvent | RevisionEvent) {
switch (event.name) {
case "documents.publish":
case "documents.update.debounced": {
const document = await Document.findByPk(event.documentId);
invariant(document, "Document should exist");
const previous = await Revision.findLatest(document.id);
// we don't create revisions if identical to the previous revision; this can
// happen if a manual revision was created from another service or user.
if (
previous &&
document.text === previous.text &&
document.title === previous.title
) {
return;
}
const user = await User.findByPk(event.actorId);
invariant(user, "User should exist");
await revisionCreator({ user, document });
break;
}
default:
}
}
}
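
Revision.findLatest is called above but defined elsewhere; a plausible implementation, assuming it is a static Sequelize query on the Revision model:

// @flow
import { Revision } from "../../models";

// hypothetical: the most recent revision for a document, or null
function findLatest(documentId: string) {
  return Revision.findOne({
    where: { documentId },
    order: [["createdAt", "DESC"]],
  });
}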


@@ -0,0 +1,61 @@
/* eslint-disable flowtype/require-valid-file-annotation */
import { Revision } from "../../models";
import { buildDocument } from "../../test/factories";
import { flushdb } from "../../test/support";
import RevisionsService from "./revisions";
const Revisions = new RevisionsService();
beforeEach(() => flushdb());
beforeEach(jest.resetAllMocks);
describe("documents.publish", () => {
test("should create a revision", async () => {
const document = await buildDocument();
await Revisions.on({
name: "documents.publish",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const amount = await Revision.count({ where: { documentId: document.id } });
expect(amount).toBe(1);
});
});
describe("documents.update.debounced", () => {
test("should create a revision", async () => {
const document = await buildDocument();
await Revisions.on({
name: "documents.update.debounced",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const amount = await Revision.count({ where: { documentId: document.id } });
expect(amount).toBe(1);
});
test("should not create a revision if identical to previous", async () => {
const document = await buildDocument();
await Revision.createFromDocument(document);
await Revisions.on({
name: "documents.update.debounced",
documentId: document.id,
collectionId: document.collectionId,
teamId: document.teamId,
actorId: document.createdById,
});
const amount = await Revision.count({ where: { documentId: document.id } });
expect(amount).toBe(1);
});
});


@@ -0,0 +1,99 @@
// @flow
import fetch from "fetch-with-proxy";
import { Document, Integration, Collection, Team } from "../../models";
import { presentSlackAttachment } from "../../presenters";
import type { DocumentEvent, IntegrationEvent, Event } from "../../types";
export default class SlackProcessor {
async on(event: Event) {
switch (event.name) {
case "documents.publish":
case "documents.update.debounced":
return this.documentUpdated(event);
case "integrations.create":
return this.integrationCreated(event);
default:
}
}
async integrationCreated(event: IntegrationEvent) {
const integration = await Integration.findOne({
where: {
id: event.modelId,
service: "slack",
type: "post",
},
include: [
{
model: Collection,
required: true,
as: "collection",
},
],
});
if (!integration) return;
const collection = integration.collection;
if (!collection) return;
await fetch(integration.settings.url, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
text: `👋 Hey there! When documents are published or updated in the *${collection.name}* collection on Outline they will be posted to this channel!`,
attachments: [
{
color: collection.color,
title: collection.name,
title_link: `${process.env.URL}${collection.url}`,
text: collection.description,
},
],
}),
});
}
async documentUpdated(event: DocumentEvent) {
// never send notifications when batch importing documents
if (event.data && event.data.source === "import") return;
const document = await Document.findByPk(event.documentId);
if (!document) return;
// never send notifications for draft documents
if (!document.publishedAt) return;
const integration = await Integration.findOne({
where: {
teamId: document.teamId,
collectionId: document.collectionId,
service: "slack",
type: "post",
},
});
if (!integration) return;
const team = await Team.findByPk(document.teamId);
let text = `${document.updatedBy.name} updated a document`;
if (event.name === "documents.publish") {
text = `${document.createdBy.name} published a new document`;
}
await fetch(integration.settings.url, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
text,
attachments: [
presentSlackAttachment(document, document.collection, team),
],
}),
});
}
}
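
presentSlackAttachment is imported from the presenters and not shown in this diff. For orientation, a hedged sketch in the shape Slack's incoming webhooks accept; the field names are Slack's, but the presenter's exact output is an assumption:

// @flow
function presentSlackAttachment(
  document: Object,
  collection: ?Object,
  team: Object
) {
  return {
    color: collection ? collection.color : undefined,
    title: document.title,
    title_link: `${process.env.URL}${document.url}`,
    // a short excerpt; the real presenter may format this differently
    text: document.text ? document.text.slice(0, 280) : "",
  };
}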


@@ -0,0 +1,498 @@
// @flow
import { subHours } from "date-fns";
import {
Document,
Collection,
Group,
CollectionGroup,
GroupUser,
} from "../../models";
import { Op } from "../../sequelize";
import type { Event } from "../../types";
export default class WebsocketsProcessor {
async on(event: Event, socketio: any) {
switch (event.name) {
case "documents.publish":
case "documents.restore":
case "documents.archive":
case "documents.unarchive": {
const document = await Document.findByPk(event.documentId, {
paranoid: false,
});
const channel = document.publishedAt
? `collection-${document.collectionId}`
: `user-${event.actorId}`;
return socketio.to(channel).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.delete": {
const document = await Document.findByPk(event.documentId, {
paranoid: false,
});
if (!document.publishedAt) {
return socketio.to(`user-${document.createdById}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
}
return socketio
.to(`collection-${document.collectionId}`)
.emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.permanent_delete": {
return socketio
.to(`collection-${event.collectionId}`)
.emit(event.name, {
documentId: event.documentId,
});
}
case "documents.pin":
case "documents.unpin":
case "documents.update": {
const document = await Document.findByPk(event.documentId, {
paranoid: false,
});
const channel = document.publishedAt
? `collection-${document.collectionId}`
: `user-${event.actorId}`;
return socketio.to(channel).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
}
case "documents.create": {
const document = await Document.findByPk(event.documentId);
return socketio.to(`user-${event.actorId}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.star":
case "documents.unstar": {
return socketio.to(`user-${event.actorId}`).emit(event.name, {
documentId: event.documentId,
});
}
case "documents.move": {
const documents = await Document.findAll({
where: {
id: event.data.documentIds,
},
paranoid: false,
});
documents.forEach((document) => {
socketio.to(`collection-${document.collectionId}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
});
event.data.collectionIds.forEach((collectionId) => {
socketio.to(`collection-${collectionId}`).emit("entities", {
event: event.name,
collectionIds: [{ id: collectionId }],
});
});
return;
}
case "collections.create": {
const collection = await Collection.findByPk(event.collectionId, {
paranoid: false,
});
socketio
.to(
collection.permission
? `team-${collection.teamId}`
: `collection-${collection.id}`
)
.emit("entities", {
event: event.name,
collectionIds: [
{
id: collection.id,
updatedAt: collection.updatedAt,
},
],
});
return socketio
.to(
collection.permission
? `team-${collection.teamId}`
: `collection-${collection.id}`
)
.emit("join", {
event: event.name,
collectionId: collection.id,
});
}
case "collections.update":
case "collections.delete": {
const collection = await Collection.findByPk(event.collectionId, {
paranoid: false,
});
return socketio.to(`team-${collection.teamId}`).emit("entities", {
event: event.name,
collectionIds: [
{
id: collection.id,
updatedAt: collection.updatedAt,
},
],
});
}
case "collections.move": {
return socketio
.to(`collection-${event.collectionId}`)
.emit("collections.update_index", {
collectionId: event.collectionId,
index: event.data.index,
});
}
case "collections.add_user": {
// the user being added isn't yet in the websocket channel for the collection
// so they need to be notified separately
socketio.to(`user-${event.userId}`).emit(event.name, {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// let everyone with access to the collection know a user was added
socketio.to(`collection-${event.collectionId}`).emit(event.name, {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
return socketio.to(`user-${event.userId}`).emit("join", {
event: event.name,
collectionId: event.collectionId,
});
}
case "collections.remove_user": {
const membershipUserIds = await Collection.membershipUserIds(
event.collectionId
);
if (membershipUserIds.includes(event.userId)) {
// Even though we just removed a user from the collection
// the user still has access through some means
// treat this like an add, so that the client re-syncs policies
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: "collections.add_user",
userId: event.userId,
collectionId: event.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${event.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("leave", {
event: event.name,
collectionId: event.collectionId,
});
}
return;
}
case "collections.add_group": {
const group = await Group.findByPk(event.data.groupId);
// the users being added are not yet in the websocket channel for the collection
// so they need to be notified separately
for (const groupMembership of group.groupMemberships) {
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
socketio.to(`user-${groupMembership.userId}`).emit("join", {
event: event.name,
collectionId: event.collectionId,
});
}
return;
}
case "collections.remove_group": {
const group = await Group.findByPk(event.data.groupId);
const membershipUserIds = await Collection.membershipUserIds(
event.collectionId
);
for (const groupMembership of group.groupMemberships) {
if (membershipUserIds.includes(groupMembership.userId)) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
} else {
// let users in the channel know they were removed
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.remove_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${groupMembership.userId}`).emit("leave", {
event: event.name,
collectionId: event.collectionId,
});
}
}
return;
}
case "groups.create":
case "groups.update": {
const group = await Group.findByPk(event.modelId, {
paranoid: false,
});
return socketio.to(`team-${group.teamId}`).emit("entities", {
event: event.name,
groupIds: [
{
id: group.id,
updatedAt: group.updatedAt,
},
],
});
}
case "groups.add_user": {
// do an add user for every collection that the group is a part of
const collectionGroupMemberships = await CollectionGroup.findAll({
where: { groupId: event.modelId },
});
for (const collectionGroup of collectionGroupMemberships) {
// the user being added isn't yet in the websocket channel for the collection
// so they need to be notified separately
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// let everyone with access to the collection know a user was added
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("join", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
return;
}
case "groups.remove_user": {
const collectionGroupMemberships = await CollectionGroup.findAll({
where: { groupId: event.modelId },
});
for (const collectionGroup of collectionGroupMemberships) {
// if the user has any memberships remaining on the collection
// we need to emit add instead of remove
const collection = await Collection.scope({
method: ["withMembership", event.userId],
}).findByPk(collectionGroup.collectionId);
if (!collection) {
continue;
}
const hasMemberships =
collection.memberships.length > 0 ||
collection.collectionGroupMemberships.length > 0;
if (hasMemberships) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("leave", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
}
return;
}
case "groups.delete": {
const group = await Group.findByPk(event.modelId, {
paranoid: false,
});
socketio.to(`team-${group.teamId}`).emit("entities", {
event: event.name,
groupIds: [
{
id: group.id,
updatedAt: group.updatedAt,
},
],
});
// we need to find the users and collection relations that were just severed as a
// result of the group deletion. Since there are cascading deletes, we approximate
// this by looking for the recently deleted items in the GroupUser and
// CollectionGroup tables
const groupUsers = await GroupUser.findAll({
paranoid: false,
where: {
groupId: event.modelId,
deletedAt: {
[Op.gt]: subHours(new Date(), 1),
},
},
});
const collectionGroupMemberships = await CollectionGroup.findAll({
paranoid: false,
where: {
groupId: event.modelId,
deletedAt: {
[Op.gt]: subHours(new Date(), 1),
},
},
});
for (const collectionGroup of collectionGroupMemberships) {
const membershipUserIds = await Collection.membershipUserIds(
collectionGroup.collectionId
);
for (const groupUser of groupUsers) {
if (membershipUserIds.includes(groupUser.userId)) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio
.to(`user-${groupUser.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupUser.userId,
collectionId: collectionGroup.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: groupUser.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${groupUser.userId}`).emit("leave", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
}
}
return;
}
default:
}
}
}
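
Finally, a hedged sketch of how this processor might be driven from the websocketsQueue defined at the top of this diff; the socket.io server bootstrap here is illustrative rather than the app's actual setup:

// @flow
import http from "http";
import IO from "socket.io";
import { websocketsQueue } from "../../queues";
import WebsocketsProcessor from "./websockets";

const io = IO(http.createServer());
const processor = new WebsocketsProcessor();

websocketsQueue.process(async (job) => {
  // each job carries an Event; the processor also needs the socket.io server
  await processor.on(job.data, io);
});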