chore: Refactoring event processors and service architecture (#2495)

Tom Moor
2021-08-27 21:42:13 -07:00
committed by GitHub
parent 86f008293a
commit 28aef82af9
61 changed files with 1610 additions and 1498 deletions

View File

@@ -104,7 +104,7 @@ MAXIMUM_IMPORT_SIZE=5120000
 # You may enable or disable debugging categories to increase the noisiness of
 # logs. The default is a good balance
-DEBUG=cache,presenters,events,emails,mailer,utils,http,server,services
+DEBUG=cache,presenters,events,emails,mailer,utils,http,server,processors
 # Comma separated list of domains to be allowed to signin to the wiki. If not
 # set, all domains are allowed by default when using Google OAuth to signin

View File

@@ -1,4 +1,3 @@
 # Architecture
 Outline is composed of a backend and frontend codebase in this monorepo. As both are written in Javascript, they share some code where possible. We utilize the latest ES6 language features, including `async`/`await`, and [Flow](https://flow.org/) typing. Prettier formatting and ESLint are enforced by CI.
@@ -46,7 +45,9 @@ server
 ├── onboarding - Markdown templates for onboarding documents
 ├── policies   - Authorization logic based on cancan
 ├── presenters - JSON presenters for database models, the interface between backend -> frontend
-├── services   - Service definitions are triggered for events and perform async jobs
+├── queues     - Async queue definitions
+│   └── processors - Processors perform async jobs, usually working on events from the event bus
+├── services   - Services start distinct portions of the application eg api, worker
 ├── static     - Static assets
 ├── test       - Test helpers and fixtures, tests themselves are colocated
 └── utils      - Utility methods specific to the backend
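The new directories deserve a moment of orientation: every module under `server/queues/processors` follows the same shape, a class exposing a single `on(event)` handler that the worker drives with events from the bus. A minimal sketch of that pattern (the handler body is illustrative, not shipped code):

```js
// @flow
// Sketch of the processor shape used under server/queues/processors.
import type { DocumentEvent } from "../../types";

export default class ExampleProcessor {
  // The worker routes each event from the global queue through `on`.
  async on(event: DocumentEvent) {
    switch (event.name) {
      case "documents.publish":
        // react to the event, e.g. refresh backlinks or notify subscribers
        break;
      default:
      // events this processor doesn't care about are ignored
    }
  }
}
```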

View File

@@ -1 +1,2 @@
-web: node ./build/server/index.js
+web: node ./build/server/index.js --services=web,websockets
+worker: node ./build/server/index.js --services=worker

View File

@@ -2,6 +2,7 @@
 declare var process: {
   exit: (code?: number) => void,
   cwd: () => string,
+  argv: Array<string>,
   env: {
     [string]: string,
   },

View File

@@ -5,12 +5,12 @@
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
"clean": "rimraf build", "clean": "rimraf build",
"build:i18n": "i18next 'app/**/*.js' 'server/**/*.js' && mkdir -p ./build/shared/i18n && cp -R ./shared/i18n/locales ./build/shared/i18n", "build:i18n": "i18next --silent 'app/**/*.js' 'server/**/*.js' && mkdir -p ./build/shared/i18n && cp -R ./shared/i18n/locales ./build/shared/i18n",
"build:server": "babel -d ./build/server ./server && babel -d ./build/shared ./shared && cp package.json ./build && ln -sf \"$(pwd)/webpack.config.dev.js\" ./build", "build:server": "babel -d ./build/server ./server && babel -d ./build/shared ./shared && cp package.json ./build && ln -sf \"$(pwd)/webpack.config.dev.js\" ./build",
"build:webpack": "webpack --config webpack.config.prod.js", "build:webpack": "webpack --config webpack.config.prod.js",
"build": "yarn clean && yarn build:webpack && yarn build:i18n && yarn build:server", "build": "yarn clean && yarn build:webpack && yarn build:i18n && yarn build:server",
"start": "node ./build/server/index.js", "start": "node ./build/server/index.js",
"dev": "nodemon --exec \"yarn build:server && yarn build:i18n && node --inspect=0.0.0.0 build/server/index.js\" -e js --ignore build/ --ignore app/ --ignore flow-typed/", "dev": "nodemon --exec \"yarn build:server && yarn build:i18n && node build/server/index.js\" -e js --ignore build/ --ignore app/ --ignore flow-typed/",
"lint": "eslint app server shared", "lint": "eslint app server shared",
"deploy": "git push heroku master", "deploy": "git push heroku master",
"prepare": "yarn yarn-deduplicate yarn.lock", "prepare": "yarn yarn-deduplicate yarn.lock",

View File

@@ -1,7 +1,7 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { Attachment } from "../models";
+import webService from "../services/web";
 import {
   buildUser,
   buildAdmin,
@@ -11,6 +11,7 @@ import {
 } from "../test/factories";
 import { flushdb } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 jest.mock("aws-sdk", () => {

View File

@@ -1,9 +1,9 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
+import webService from "../services/web";
 import { buildUser, buildTeam } from "../test/factories";
 import { flushdb } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,10 +1,11 @@
 // @flow
 import TestServer from "fetch-test-server";
 import { v4 as uuidv4 } from "uuid";
-import app from "../app";
+import webService from "../services/web";
 import { buildUser, buildAdmin, buildTeam } from "../test/factories";
 import { flushdb } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,7 +1,7 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { Document, CollectionUser, CollectionGroup } from "../models";
+import webService from "../services/web";
 import {
   buildUser,
   buildAdmin,
@@ -11,6 +11,7 @@ import {
 } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());
@@ -262,7 +263,7 @@ describe("#collections.move", () => {
 });
 describe("#collections.export", () => {
-  it("should now allow export of private collection not a member", async () => {
+  it("should not allow export of private collection not a member", async () => {
     const { user } = await seed();
     const collection = await buildCollection({
       permission: null,

View File

@@ -1,6 +1,5 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import {
   Document,
   View,
@@ -10,6 +9,7 @@ import {
   CollectionUser,
   SearchQuery,
 } from "../models";
+import webService from "../services/web";
 import {
   buildShare,
   buildCollection,
@@ -17,7 +17,7 @@ import {
   buildDocument,
 } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,9 +1,9 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
+import webService from "../services/web";
 import { buildEvent, buildUser } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,10 +1,10 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { Event } from "../models";
+import webService from "../services/web";
 import { buildUser, buildAdmin, buildGroup } from "../test/factories";
 import { flushdb } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,11 +1,12 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { IntegrationAuthentication, SearchQuery } from "../models";
+import webService from "../services/web";
 import * as Slack from "../slack";
 import { buildDocument, buildIntegration } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,7 +1,8 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
+import webService from "../services/web";
 import { flushdb } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,8 +1,8 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../../app";
+import webService from "../../services/web";
 import { flushdb, seed } from "../../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,10 +1,11 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { Revision } from "../models";
+import webService from "../services/web";
 import { buildDocument, buildUser } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,10 +1,11 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { CollectionUser } from "../models";
+import webService from "../services/web";
 import { buildUser, buildDocument, buildShare } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,9 +1,9 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
+import webService from "../services/web";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,11 +1,11 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
+import webService from "../services/web";
 import { buildTeam, buildAdmin, buildUser } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,11 +1,12 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import { subDays } from "date-fns";
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { Document } from "../models";
+import webService from "../services/web";
 import { buildDocument } from "../test/factories";
 import { flushdb } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 jest.mock("aws-sdk", () => {

View File

@@ -1,10 +1,11 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
 import TestServer from "fetch-test-server";
-import app from "../app";
 import { View, CollectionUser } from "../models";
+import webService from "../services/web";
 import { buildUser } from "../test/factories";
 import { flushdb, seed } from "../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 beforeEach(() => flushdb());

View File

@@ -1,213 +0,0 @@
// @flow
import * as Sentry from "@sentry/node";
import debug from "debug";
import Koa from "koa";
import compress from "koa-compress";
import helmet, {
contentSecurityPolicy,
dnsPrefetchControl,
referrerPolicy,
} from "koa-helmet";
import logger from "koa-logger";
import mount from "koa-mount";
import onerror from "koa-onerror";
import enforceHttps from "koa-sslify";
import api from "./api";
import auth from "./auth";
import emails from "./emails";
import env from "./env";
import routes from "./routes";
import updates from "./utils/updates";
const app = new Koa();
const isProduction = process.env.NODE_ENV === "production";
const isTest = process.env.NODE_ENV === "test";
const log = debug("http");
// Construct scripts CSP based on services in use by this installation
const defaultSrc = ["'self'"];
const scriptSrc = [
"'self'",
"'unsafe-inline'",
"'unsafe-eval'",
"gist.github.com",
];
if (env.GOOGLE_ANALYTICS_ID) {
scriptSrc.push("www.google-analytics.com");
}
if (env.CDN_URL) {
scriptSrc.push(env.CDN_URL);
defaultSrc.push(env.CDN_URL);
}
app.use(compress());
if (isProduction) {
// Force redirect to HTTPS protocol unless explicitly disabled
if (process.env.FORCE_HTTPS !== "false") {
app.use(
enforceHttps({
trustProtoHeader: true,
})
);
} else {
console.warn("Enforced https was disabled with FORCE_HTTPS env variable");
}
// trust header fields set by our proxy. eg X-Forwarded-For
app.proxy = true;
} else if (!isTest) {
/* eslint-disable global-require */
const convert = require("koa-convert");
const webpack = require("webpack");
const devMiddleware = require("koa-webpack-dev-middleware");
const hotMiddleware = require("koa-webpack-hot-middleware");
const config = require("../webpack.config.dev");
const compile = webpack(config);
/* eslint-enable global-require */
const middleware = devMiddleware(compile, {
// display no info to console (only warnings and errors)
noInfo: true,
// display nothing to the console
quiet: false,
watchOptions: {
poll: 1000,
ignored: ["node_modules", "flow-typed", "server", "build", "__mocks__"],
},
// public path to bind the middleware to
// use the same as in webpack
publicPath: config.output.publicPath,
// options for formatting the statistics
stats: {
colors: true,
},
});
app.use(async (ctx, next) => {
ctx.webpackConfig = config;
ctx.devMiddleware = middleware;
await next();
});
app.use(convert(middleware));
app.use(
convert(
hotMiddleware(compile, {
log: console.log, // eslint-disable-line
path: "/__webpack_hmr",
heartbeat: 10 * 1000,
})
)
);
app.use(mount("/emails", emails));
}
// redirect routing logger to optional "http" debug
app.use(
logger((str, args) => {
log(str);
})
);
// catch errors in one place, automatically set status and response headers
onerror(app);
if (env.SENTRY_DSN) {
Sentry.init({
dsn: env.SENTRY_DSN,
environment: env.ENVIRONMENT,
release: env.RELEASE,
maxBreadcrumbs: 0,
ignoreErrors: [
// emitted by Koa when bots attempt to snoop on paths such as wp-admin
// or the user client submits a bad request. These are expected in normal
// running of the application and don't need to be reported.
"BadRequestError",
"UnauthorizedError",
],
});
}
app.on("error", (error, ctx) => {
// we don't need to report every time a request stops to the bug tracker
if (error.code === "EPIPE" || error.code === "ECONNRESET") {
console.warn("Connection error", { error });
return;
}
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
const requestId = ctx.headers["x-request-id"];
if (requestId) {
scope.setTag("request_id", requestId);
}
const authType = ctx.state ? ctx.state.authType : undefined;
if (authType) {
scope.setTag("auth_type", authType);
}
const userId =
ctx.state && ctx.state.user ? ctx.state.user.id : undefined;
if (userId) {
scope.setUser({ id: userId });
}
scope.addEventProcessor(function (event) {
return Sentry.Handlers.parseRequest(event, ctx.request);
});
Sentry.captureException(error);
});
} else {
console.error(error);
}
});
app.use(mount("/auth", auth));
app.use(mount("/api", api));
// Sets common security headers by default, such as no-sniff, hsts, hide powered
// by etc, these are applied after auth and api so they are only returned on
// standard non-XHR accessed routes
app.use(async (ctx, next) => {
ctx.set("Permissions-Policy", "interest-cohort=()");
await next();
});
app.use(helmet());
app.use(
contentSecurityPolicy({
directives: {
defaultSrc,
scriptSrc,
styleSrc: ["'self'", "'unsafe-inline'", "github.githubassets.com"],
imgSrc: ["*", "data:", "blob:"],
frameSrc: ["*"],
connectSrc: ["*"],
// Do not use connect-src: because self + websockets does not work in
// Safari, ref: https://bugs.webkit.org/show_bug.cgi?id=201591
},
})
);
// Allow DNS prefetching for performance, we do not care about leaking requests
// to our own CDN's
app.use(dnsPrefetchControl({ allow: true }));
app.use(referrerPolicy({ policy: "no-referrer" }));
app.use(mount(routes));
/**
* Production updates and anonymous analytics.
*
* Set ENABLE_UPDATES=false to disable them for your installation
*/
if (process.env.ENABLE_UPDATES !== "false" && isProduction) {
updates();
setInterval(updates, 24 * 3600 * 1000);
}
export default app;

View File

@@ -4,7 +4,7 @@ import Router from "koa-router";
 import { find } from "lodash";
 import { parseDomain, isCustomSubdomain } from "../../../shared/utils/domains";
 import { AuthorizationError } from "../../errors";
-import mailer, { sendEmail } from "../../mailer";
+import mailer from "../../mailer";
 import errorHandling from "../../middlewares/errorHandling";
 import methodOverride from "../../middlewares/methodOverride";
 import validation from "../../middlewares/validation";
@@ -108,7 +108,7 @@ router.post("email", errorHandling(), async (ctx) => {
   }
   // send email to users registered address with a short-lived token
-  mailer.signin({
+  await mailer.sendTemplate("signin", {
     to: user.email,
     token: user.getEmailSigninToken(),
     teamUrl: team.url,
@@ -138,7 +138,10 @@ router.get("email.callback", async (ctx) => {
     return ctx.redirect("/?notice=suspended");
   }
   if (user.isInvited) {
-    sendEmail("welcome", user.email, { teamUrl: user.team.url });
+    await mailer.sendTemplate("welcome", {
+      to: user.email,
+      teamUrl: user.team.url,
+    });
   }
   await user.update({ lastActiveAt: new Date() });

View File

@@ -1,10 +1,11 @@
 // @flow
 import TestServer from "fetch-test-server";
-import app from "../../app";
 import mailer from "../../mailer";
+import webService from "../../services/web";
 import { buildUser, buildGuestUser, buildTeam } from "../../test/factories";
 import { flushdb } from "../../test/support";
+const app = webService();
 const server = new TestServer(app.callback());
 jest.mock("../../mailer");
@@ -13,7 +14,7 @@ beforeEach(async () => {
   await flushdb();
   // $FlowFixMe does not understand Jest mocks
-  mailer.signin.mockReset();
+  mailer.sendTemplate.mockReset();
 });
 afterAll(() => server.close());
@@ -39,7 +40,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.redirect).toMatch("slack");
-    expect(mailer.signin).not.toHaveBeenCalled();
+    expect(mailer.sendTemplate).not.toHaveBeenCalled();
   });
   it("should respond with redirect location when user is SSO enabled on another subdomain", async () => {
@@ -60,7 +61,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.redirect).toMatch("slack");
-    expect(mailer.signin).not.toHaveBeenCalled();
+    expect(mailer.sendTemplate).not.toHaveBeenCalled();
   });
   it("should respond with success when user is not SSO enabled", async () => {
@@ -73,7 +74,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.success).toEqual(true);
-    expect(mailer.signin).toHaveBeenCalled();
+    expect(mailer.sendTemplate).toHaveBeenCalled();
   });
   it("should respond with success regardless of whether successful to prevent crawling email logins", async () => {
@@ -84,7 +85,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.success).toEqual(true);
-    expect(mailer.signin).not.toHaveBeenCalled();
+    expect(mailer.sendTemplate).not.toHaveBeenCalled();
   });
   describe("with multiple users matching email", () => {
@@ -108,7 +109,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.redirect).toMatch("slack");
-    expect(mailer.signin).not.toHaveBeenCalled();
+    expect(mailer.sendTemplate).not.toHaveBeenCalled();
   });
   it("should default to current subdomain with guest email", async () => {
@@ -131,7 +132,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.success).toEqual(true);
-    expect(mailer.signin).toHaveBeenCalled();
+    expect(mailer.sendTemplate).toHaveBeenCalled();
   });
   it("should default to custom domain with SSO", async () => {
@@ -151,7 +152,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.redirect).toMatch("slack");
-    expect(mailer.signin).not.toHaveBeenCalled();
+    expect(mailer.sendTemplate).not.toHaveBeenCalled();
   });
   it("should default to custom domain with guest email", async () => {
@@ -171,7 +172,7 @@ describe("email", () => {
     expect(res.status).toEqual(200);
     expect(body.success).toEqual(true);
-    expect(mailer.signin).toHaveBeenCalled();
+    expect(mailer.sendTemplate).toHaveBeenCalled();
   });
   });
 });

View File

@@ -6,7 +6,7 @@ import {
   EmailAuthenticationRequiredError,
   AuthenticationProviderDisabledError,
 } from "../errors";
-import { sendEmail } from "../mailer";
+import mailer from "../mailer";
 import { Collection, Team, User } from "../models";
 import teamCreator from "./teamCreator";
 import userCreator from "./userCreator";
@@ -87,7 +87,10 @@ export default async function accountProvisioner({
   const { isNewUser, user } = result;
   if (isNewUser) {
-    sendEmail("welcome", user.email, { teamUrl: team.url });
+    await mailer.sendTemplate("welcome", {
+      to: user.email,
+      teamUrl: team.url,
+    });
   }
   if (isNewUser || isNewTeam) {

View File

@@ -1,5 +1,5 @@
 // @flow
-import { sendEmail } from "../mailer";
+import mailer from "../mailer";
 import { Collection, UserAuthentication } from "../models";
 import { buildUser, buildTeam } from "../test/factories";
 import { flushdb } from "../test/support";
@@ -17,7 +17,7 @@ jest.mock("aws-sdk", () => {
 beforeEach(() => {
   // $FlowFixMe
-  sendEmail.mockReset();
+  mailer.sendTemplate.mockReset();
   return flushdb();
 });
@@ -59,7 +59,7 @@ describe("accountProvisioner", () => {
   expect(user.email).toEqual("jenny@example.com");
   expect(isNewUser).toEqual(true);
   expect(isNewTeam).toEqual(true);
-  expect(sendEmail).toHaveBeenCalled();
+  expect(mailer.sendTemplate).toHaveBeenCalled();
   const collectionCount = await Collection.count();
   expect(collectionCount).toEqual(1);
@@ -104,7 +104,7 @@ describe("accountProvisioner", () => {
   expect(user.email).toEqual(newEmail);
   expect(isNewTeam).toEqual(false);
   expect(isNewUser).toEqual(false);
-  expect(sendEmail).not.toHaveBeenCalled();
+  expect(mailer.sendTemplate).not.toHaveBeenCalled();
   const collectionCount = await Collection.count();
   expect(collectionCount).toEqual(0);
@@ -187,7 +187,7 @@ describe("accountProvisioner", () => {
   expect(auth.scopes[0]).toEqual("read");
   expect(user.email).toEqual("jenny@example.com");
   expect(isNewUser).toEqual(true);
-  expect(sendEmail).toHaveBeenCalled();
+  expect(mailer.sendTemplate).toHaveBeenCalled();
   // should provision welcome collection
   const collectionCount = await Collection.count();

View File

@@ -7,6 +7,7 @@ import { getAllowedDomains } from "../utils/authentication";
 import { generateAvatarUrl } from "../utils/avatars";
 const log = debug("server");
+
 type TeamCreatorResult = {|
   team: Team,
   authenticationProvider: AuthenticationProvider,

View File

@@ -34,7 +34,7 @@ describe("teamCreator", () => {
     expect(isNewTeam).toEqual(true);
   });
-  it("should now allow creating multiple teams in installation", async () => {
+  it("should not allow creating multiple teams in installation", async () => {
     await buildTeam();
     let error;

View File

@@ -54,6 +54,7 @@ export default async function userInviter({
       service: null,
     });
     users.push(newUser);
+
     await Event.create({
       name: "users.invite",
       actorId: user.id,
@@ -64,7 +65,8 @@ export default async function userInviter({
       },
       ip,
     });
-    await mailer.invite({
+
+    await mailer.sendTemplate("invite", {
       to: invite.email,
       name: invite.name,
       actorName: user.name,

View File

@@ -1,18 +1,4 @@
 // @flow
-// Note: This entire object is stringified in the HTML exposed to the client
-// do not add anything here that should be a secret or password
-export default {
-  URL: process.env.URL,
-  CDN_URL: process.env.CDN_URL || "",
-  DEPLOYMENT: process.env.DEPLOYMENT,
-  ENVIRONMENT: process.env.NODE_ENV,
-  SENTRY_DSN: process.env.SENTRY_DSN,
-  TEAM_LOGO: process.env.TEAM_LOGO,
-  SLACK_KEY: process.env.SLACK_KEY,
-  SLACK_APP_ID: process.env.SLACK_APP_ID,
-  MAXIMUM_IMPORT_SIZE: process.env.MAXIMUM_IMPORT_SIZE || 1024 * 1000 * 5,
-  SUBDOMAINS_ENABLED: process.env.SUBDOMAINS_ENABLED === "true",
-  GOOGLE_ANALYTICS_ID: process.env.GOOGLE_ANALYTICS_ID,
-  RELEASE: process.env.SOURCE_COMMIT || process.env.SOURCE_VERSION || undefined,
-};
+require("dotenv").config({ silent: true });
+export default process.env;

View File

@@ -1,233 +0,0 @@
// @flow
import * as Sentry from "@sentry/node";
import debug from "debug";
import services from "./services";
import { createQueue } from "./utils/queue";
const log = debug("services");
export type UserEvent =
| {
name: | "users.create" // eslint-disable-line
| "users.signin"
| "users.update"
| "users.suspend"
| "users.activate"
| "users.delete",
userId: string,
teamId: string,
actorId: string,
ip: string,
}
| {
name: "users.invite",
teamId: string,
actorId: string,
data: {
email: string,
name: string,
},
ip: string,
};
export type DocumentEvent =
| {
name: | "documents.create" // eslint-disable-line
| "documents.publish"
| "documents.delete"
| "documents.permanent_delete"
| "documents.pin"
| "documents.unpin"
| "documents.archive"
| "documents.unarchive"
| "documents.restore"
| "documents.star"
| "documents.unstar",
documentId: string,
collectionId: string,
teamId: string,
actorId: string,
ip: string,
data: {
title: string,
source?: "import",
},
}
| {
name: "documents.move",
documentId: string,
collectionId: string,
teamId: string,
actorId: string,
data: {
collectionIds: string[],
documentIds: string[],
},
ip: string,
}
| {
name: | "documents.update" // eslint-disable-line
| "documents.update.delayed"
| "documents.update.debounced",
documentId: string,
collectionId: string,
createdAt: string,
teamId: string,
actorId: string,
data: {
title: string,
autosave: boolean,
done: boolean,
},
ip: string,
}
| {
name: "documents.title_change",
documentId: string,
collectionId: string,
createdAt: string,
teamId: string,
actorId: string,
data: {
title: string,
previousTitle: string,
},
ip: string,
};
export type RevisionEvent = {
name: "revisions.create",
documentId: string,
collectionId: string,
teamId: string,
};
export type CollectionImportEvent = {
name: "collections.import",
modelId: string,
teamId: string,
actorId: string,
data: { type: "outline" },
ip: string,
};
export type CollectionEvent =
| {
name: | "collections.create" // eslint-disable-line
| "collections.update"
| "collections.delete",
collectionId: string,
teamId: string,
actorId: string,
data: { name: string },
ip: string,
}
| {
name: "collections.add_user" | "collections.remove_user",
userId: string,
collectionId: string,
teamId: string,
actorId: string,
ip: string,
}
| {
name: "collections.add_group" | "collections.remove_group",
collectionId: string,
teamId: string,
actorId: string,
data: { name: string, groupId: string },
ip: string,
}
| {
name: "collections.move",
collectionId: string,
teamId: string,
actorId: string,
data: { index: string },
ip: string,
};
export type GroupEvent =
| {
name: "groups.create" | "groups.delete" | "groups.update",
actorId: string,
modelId: string,
teamId: string,
data: { name: string },
ip: string,
}
| {
name: "groups.add_user" | "groups.remove_user",
actorId: string,
userId: string,
modelId: string,
teamId: string,
data: { name: string },
ip: string,
};
export type IntegrationEvent = {
name: "integrations.create" | "integrations.update",
modelId: string,
teamId: string,
actorId: string,
ip: string,
};
export type TeamEvent = {
name: "teams.update",
teamId: string,
actorId: string,
data: Object,
ip: string,
};
export type Event =
| UserEvent
| DocumentEvent
| CollectionEvent
| CollectionImportEvent
| IntegrationEvent
| GroupEvent
| RevisionEvent
| TeamEvent;
const globalEventsQueue = createQueue("global events");
const serviceEventsQueue = createQueue("service events");
// this queue processes global events and hands them off to service hooks
globalEventsQueue.process(async (job) => {
const names = Object.keys(services);
names.forEach((name) => {
const service = services[name];
if (service.on) {
serviceEventsQueue.add(
{ ...job.data, service: name },
{ removeOnComplete: true }
);
}
});
});
// this queue processes an individual event for a specific service
serviceEventsQueue.process(async (job) => {
const event = job.data;
const service = services[event.service];
if (service.on) {
log(`${event.service} processing ${event.name}`);
service.on(event).catch((error) => {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
});
}
});
export default globalEventsQueue;

View File

@@ -38,7 +38,7 @@ async function exportAndEmailCollections(teamId: string, email: string) {
   });
 }
-exporterQueue.process(async (job) => {
+exporterQueue.process(async function exportProcessor(job) {
   log("Process", job.data);
   switch (job.data.type) {

View File

@@ -1,121 +1,80 @@
 // @flow
-require("dotenv").config({ silent: true });
-const errors = [];
-const chalk = require("chalk");
-const throng = require("throng");
-// If the DataDog agent is installed and the DD_API_KEY environment variable is
-// in the environment then we can safely attempt to start the DD tracer
-if (process.env.DD_API_KEY) {
-  require("dd-trace").init({
-    // SOURCE_COMMIT is used by Docker Hub
-    // SOURCE_VERSION is used by Heroku
-    version: process.env.SOURCE_COMMIT || process.env.SOURCE_VERSION,
-  });
-}
-if (
-  !process.env.SECRET_KEY ||
-  process.env.SECRET_KEY === "generate_a_new_key"
-) {
-  errors.push(
-    `The ${chalk.bold(
-      "SECRET_KEY"
-    )} env variable must be set with the output of ${chalk.bold(
-      "$ openssl rand -hex 32"
-    )}`
-  );
-}
-if (
-  !process.env.UTILS_SECRET ||
-  process.env.UTILS_SECRET === "generate_a_new_key"
-) {
-  errors.push(
-    `The ${chalk.bold(
-      "UTILS_SECRET"
-    )} env variable must be set with a secret value, it is recommended to use the output of ${chalk.bold(
-      "$ openssl rand -hex 32"
-    )}`
-  );
-}
-if (process.env.AWS_ACCESS_KEY_ID) {
-  [
-    "AWS_REGION",
-    "AWS_SECRET_ACCESS_KEY",
-    "AWS_S3_UPLOAD_BUCKET_URL",
-    "AWS_S3_UPLOAD_MAX_SIZE",
-  ].forEach((key) => {
-    if (!process.env[key]) {
-      errors.push(
-        `The ${chalk.bold(
-          key
-        )} env variable must be set when using S3 compatible storage`
-      );
-    }
-  });
-}
-if (!process.env.URL) {
-  errors.push(
-    `The ${chalk.bold(
-      "URL"
-    )} env variable must be set to the fully qualified, externally accessible URL, e.g https://wiki.mycompany.com`
-  );
-}
-if (!process.env.DATABASE_URL && !process.env.DATABASE_CONNECTION_POOL_URL) {
-  errors.push(
-    `The ${chalk.bold(
-      "DATABASE_URL"
-    )} env variable must be set to the location of your postgres server, including username, password, and port`
-  );
-}
-if (!process.env.REDIS_URL) {
-  errors.push(
-    `The ${chalk.bold(
-      "REDIS_URL"
-    )} env variable must be set to the location of your redis server, including username, password, and port`
-  );
-}
-if (errors.length) {
-  console.log(
-    chalk.bold.red(
-      "\n\nThe server could not start, please fix the following configuration errors and try again:\n"
-    )
-  );
-  errors.map((text) => console.log(` - ${text}`));
-  console.log("\n");
-  process.exit(1);
-}
-if (process.env.NODE_ENV === "production") {
-  console.log(
-    chalk.green(
-      `
-Is your team enjoying Outline? Consider supporting future development by sponsoring the project:\n\nhttps://github.com/sponsors/outline
-`
-    )
-  );
-} else if (process.env.NODE_ENV === "development") {
-  console.log(
-    chalk.yellow(
-      `\nRunning Outline in development mode. To run Outline in production mode set the ${chalk.bold(
-        "NODE_ENV"
-      )} env variable to "production"\n`
-    )
-  );
-}
-const { start } = require("./main");
+import env from "./env"; // eslint-disable-line import/order
+import http from "http";
+import debug from "debug";
+import Koa from "koa";
+import compress from "koa-compress";
+import helmet from "koa-helmet";
+import logger from "koa-logger";
+import { uniq } from "lodash";
+import throng from "throng";
+import "./sentry";
+import services from "./services";
+import { initTracing } from "./tracing";
+import { checkEnv, checkMigrations } from "./utils/startup";
+import { checkUpdates } from "./utils/updates";
+checkEnv();
+initTracing();
+checkMigrations();
+// If a services flag is passed it takes priority over the environment variable
+// for example: --services=web,worker
+const normalizedServiceFlag = process.argv
+  .slice(2)
+  .filter((arg) => arg.startsWith("--services="))
+  .map((arg) => arg.split("=")[1])
+  .join(",");
+// The default is to run all services to make development and OSS installations
+// easier to deal with. Separate services are only needed at scale.
+const serviceNames = uniq(
+  (normalizedServiceFlag || env.SERVICES || "web,websockets,worker")
+    .split(",")
+    .map((service) => service.trim())
+);
+async function start() {
+  const app = new Koa();
+  const server = http.createServer(app.callback());
+  const httpLogger = debug("http");
+  const log = debug("server");
+  app.use(logger((str, args) => httpLogger(str)));
+  app.use(compress());
+  app.use(helmet());
+  // loop through requested services at startup
+  for (const name of serviceNames) {
+    if (!Object.keys(services).includes(name)) {
+      throw new Error(`Unknown service ${name}`);
+    }
+    log(`Starting ${name} service`);
+    const init = services[name];
+    await init(app, server);
+  }
+  server.on("error", (err) => {
+    throw err;
+  });
+  server.on("listening", () => {
+    const address = server.address();
+    console.log(`\n> Listening on http://localhost:${address.port}\n`);
+  });
+  server.listen(env.PORT || "3000");
+}
 throng({
   worker: start,
   // The number of workers to run, defaults to the number of CPUs available
   count: process.env.WEB_CONCURRENCY || undefined,
 });
+if (env.ENABLE_UPDATES !== "false" && process.env.NODE_ENV === "production") {
+  checkUpdates();
+  setInterval(checkUpdates, 24 * 3600 * 1000);
+}
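Each entry in `services` is expected to be an init function, since the bootstrap awaits `init(app, server)` with the shared Koa app and HTTP server. A hypothetical service module written against that contract (the route it registers is made up for illustration):

```js
// @flow
// Hypothetical server/services/example.js matching the init contract above.
import http from "http";
import Koa from "koa";
import Router from "koa-router";

export default function init(app: Koa, server: http.Server) {
  const router = new Router();
  // illustrative endpoint; real services mount the api, web, or websocket stacks
  router.get("/_example", (ctx) => {
    ctx.body = "ok";
  });
  app.use(router.routes());
}
```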

View File

@@ -23,15 +23,15 @@ import {
 import { SigninEmail, signinEmailText } from "./emails/SigninEmail";
 import { WelcomeEmail, welcomeEmailText } from "./emails/WelcomeEmail";
 import { baseStyles } from "./emails/components/EmailLayout";
-import { createQueue } from "./utils/queue";
+import { emailsQueue } from "./queues";
 const log = debug("emails");
 const useTestEmailService =
   process.env.NODE_ENV === "development" && !process.env.SMTP_USERNAME;
-type Emails = "welcome" | "export";
-type SendMailType = {
+export type EmailTypes = "welcome" | "export" | "invite" | "signin";
+export type EmailSendOptions = {
   to: string,
   properties?: any,
   title: string,
@@ -42,13 +42,6 @@ type SendMailType = {
   attachments?: Object[],
 };
-type EmailJob = {
-  data: {
-    type: Emails,
-    opts: SendMailType,
-  },
-};
 /**
  * Mailer
  *
@@ -63,7 +56,61 @@ type EmailJob = {
 export class Mailer {
   transporter: ?any;
-  sendMail = async (data: SendMailType): ?Promise<*> => {
+  constructor() {
+    this.loadTransport();
+  }
+
+  async loadTransport() {
+    if (process.env.SMTP_HOST) {
+      let smtpConfig = {
+        host: process.env.SMTP_HOST,
+        port: process.env.SMTP_PORT,
+        secure:
+          "SMTP_SECURE" in process.env
+            ? process.env.SMTP_SECURE === "true"
+            : process.env.NODE_ENV === "production",
+        auth: undefined,
+        tls:
+          "SMTP_TLS_CIPHERS" in process.env
+            ? { ciphers: process.env.SMTP_TLS_CIPHERS }
+            : undefined,
+      };
+      if (process.env.SMTP_USERNAME) {
+        smtpConfig.auth = {
+          user: process.env.SMTP_USERNAME,
+          pass: process.env.SMTP_PASSWORD,
+        };
+      }
+      this.transporter = nodemailer.createTransport(smtpConfig);
+      return;
+    }
+    if (useTestEmailService) {
+      log("SMTP_USERNAME not provided, generating test account…");
+      try {
+        let testAccount = await nodemailer.createTestAccount();
+        const smtpConfig = {
+          host: "smtp.ethereal.email",
+          port: 587,
+          secure: false,
+          auth: {
+            user: testAccount.user,
+            pass: testAccount.pass,
+          },
+        };
+        this.transporter = nodemailer.createTransport(smtpConfig);
+      } catch (err) {
+        log(`Could not generate test account: ${err.message}`);
+      }
+    }
+  }
+
+  sendMail = async (data: EmailSendOptions): ?Promise<*> => {
     const { transporter } = this;
     if (transporter) {
@@ -164,87 +211,23 @@
     });
   };
-  constructor() {
-    this.loadTransport();
-  }
-
-  async loadTransport() {
-    if (process.env.SMTP_HOST) {
-      let smtpConfig = {
-        host: process.env.SMTP_HOST,
-        port: process.env.SMTP_PORT,
-        secure:
-          "SMTP_SECURE" in process.env
-            ? process.env.SMTP_SECURE === "true"
-            : process.env.NODE_ENV === "production",
-        auth: undefined,
-        tls:
-          "SMTP_TLS_CIPHERS" in process.env
-            ? { ciphers: process.env.SMTP_TLS_CIPHERS }
-            : undefined,
-      };
-      if (process.env.SMTP_USERNAME) {
-        smtpConfig.auth = {
-          user: process.env.SMTP_USERNAME,
-          pass: process.env.SMTP_PASSWORD,
-        };
-      }
-      this.transporter = nodemailer.createTransport(smtpConfig);
-      return;
-    }
-    if (useTestEmailService) {
-      log("SMTP_USERNAME not provided, generating test account…");
-      try {
-        let testAccount = await nodemailer.createTestAccount();
-        const smtpConfig = {
-          host: "smtp.ethereal.email",
-          port: 587,
-          secure: false,
-          auth: {
-            user: testAccount.user,
-            pass: testAccount.pass,
-          },
-        };
-        this.transporter = nodemailer.createTransport(smtpConfig);
-      } catch (err) {
-        log(`Could not generate test account: ${err.message}`);
-      }
-    }
-  }
+  sendTemplate = async (type: EmailTypes, opts?: Object = {}) => {
+    await emailsQueue.add(
+      {
+        type,
+        opts,
+      },
+      {
+        attempts: 5,
+        removeOnComplete: true,
+        backoff: {
+          type: "exponential",
+          delay: 60 * 1000,
+        },
+      }
+    );
+  };
 }
 const mailer = new Mailer();
 export default mailer;
-export const mailerQueue = createQueue("email");
-mailerQueue.process(async (job: EmailJob) => {
-  // $FlowIssue flow doesn't like dynamic values
-  await mailer[job.data.type](job.data.opts);
-});
-export const sendEmail = (type: Emails, to: string, options?: Object = {}) => {
-  mailerQueue.add(
-    {
-      type,
-      opts: {
-        to,
-        ...options,
-      },
-    },
-    {
-      attempts: 5,
-      removeOnComplete: true,
-      backoff: {
-        type: "exponential",
-        delay: 60 * 1000,
-      },
-    }
-  );
-};
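Callers now funnel through this single queue-backed entry point instead of the removed `sendEmail` helper, with the recipient folded into the options object. For example (values illustrative):

```js
// Enqueues an "invite" email job; delivery, retries, and exponential
// backoff are handled by the queue rather than the caller.
await mailer.sendTemplate("invite", {
  to: "jenny@example.com",
  name: "Jenny",
  actorName: "Tom",
});
```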

View File

@@ -1,246 +0,0 @@
// @flow
import http from "http";
import * as Sentry from "@sentry/node";
import IO from "socket.io";
import socketRedisAdapter from "socket.io-redis";
import SocketAuth from "socketio-auth";
import app from "./app";
import { Document, Collection, View } from "./models";
import policy from "./policies";
import { client, subscriber } from "./redis";
import { getUserForJWT } from "./utils/jwt";
import * as metrics from "./utils/metrics";
import { checkMigrations } from "./utils/startup";
const server = http.createServer(app.callback());
let io;
const { can } = policy;
io = IO(server, {
path: "/realtime",
serveClient: false,
cookie: false,
});
io.adapter(
socketRedisAdapter({
pubClient: client,
subClient: subscriber,
})
);
io.origins((_, callback) => {
callback(null, true);
});
io.of("/").adapter.on("error", (err) => {
if (err.name === "MaxRetriesPerRequestError") {
console.error(`Redis error: ${err.message}. Shutting down now.`);
throw err;
} else {
console.error(`Redis error: ${err.message}`);
}
});
io.on("connection", (socket) => {
metrics.increment("websockets.connected");
metrics.gaugePerInstance(
"websockets.count",
socket.client.conn.server.clientsCount
);
socket.on("disconnect", () => {
metrics.increment("websockets.disconnected");
metrics.gaugePerInstance(
"websockets.count",
socket.client.conn.server.clientsCount
);
});
});
SocketAuth(io, {
authenticate: async (socket, data, callback) => {
const { token } = data;
try {
const user = await getUserForJWT(token);
socket.client.user = user;
// store the mapping between socket id and user id in redis
// so that it is accessible across multiple server nodes
await client.hset(socket.id, "userId", user.id);
return callback(null, true);
} catch (err) {
return callback(err);
}
},
postAuthenticate: async (socket, data) => {
const { user } = socket.client;
// the rooms associated with the current team
// and user so we can send authenticated events
let rooms = [`team-${user.teamId}`, `user-${user.id}`];
// the rooms associated with collections this user
// has access to on connection. New collection subscriptions
// are managed from the client as needed through the 'join' event
const collectionIds = await user.collectionIds();
collectionIds.forEach((collectionId) =>
rooms.push(`collection-${collectionId}`)
);
// join all of the rooms at once
socket.join(rooms);
// allow the client to request to join rooms
socket.on("join", async (event) => {
// user is joining a collection channel, because their permissions have
// changed, granting them access.
if (event.collectionId) {
const collection = await Collection.scope({
method: ["withMembership", user.id],
}).findByPk(event.collectionId);
if (can(user, "read", collection)) {
socket.join(`collection-${event.collectionId}`, () => {
metrics.increment("websockets.collections.join");
});
}
}
// user is joining a document channel, because they have navigated to
// view a document.
if (event.documentId) {
const document = await Document.findByPk(event.documentId, {
userId: user.id,
});
if (can(user, "read", document)) {
const room = `document-${event.documentId}`;
await View.touch(event.documentId, user.id, event.isEditing);
const editing = await View.findRecentlyEditingByDocument(
event.documentId
);
socket.join(room, () => {
metrics.increment("websockets.documents.join");
// let everyone else in the room know that a new user joined
io.to(room).emit("user.join", {
userId: user.id,
documentId: event.documentId,
isEditing: event.isEditing,
});
// let this user know who else is already present in the room
io.in(room).clients(async (err, sockets) => {
if (err) {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("clients", sockets);
Sentry.captureException(err);
});
} else {
console.error(err);
}
return;
}
// because a single user can have multiple socket connections we
// need to make sure that only unique userIds are returned. A Map
// makes this easy.
let userIds = new Map();
for (const socketId of sockets) {
const userId = await client.hget(socketId, "userId");
userIds.set(userId, userId);
}
socket.emit("document.presence", {
documentId: event.documentId,
userIds: Array.from(userIds.keys()),
editingIds: editing.map((view) => view.userId),
});
});
});
}
}
});
// allow the client to request to leave rooms
socket.on("leave", (event) => {
if (event.collectionId) {
socket.leave(`collection-${event.collectionId}`, () => {
metrics.increment("websockets.collections.leave");
});
}
if (event.documentId) {
const room = `document-${event.documentId}`;
socket.leave(room, () => {
metrics.increment("websockets.documents.leave");
io.to(room).emit("user.leave", {
userId: user.id,
documentId: event.documentId,
});
});
}
});
socket.on("disconnecting", () => {
const rooms = Object.keys(socket.rooms);
rooms.forEach((room) => {
if (room.startsWith("document-")) {
const documentId = room.replace("document-", "");
io.to(room).emit("user.leave", {
userId: user.id,
documentId,
});
}
});
});
socket.on("presence", async (event) => {
metrics.increment("websockets.presence");
const room = `document-${event.documentId}`;
if (event.documentId && socket.rooms[room]) {
const view = await View.touch(
event.documentId,
user.id,
event.isEditing
);
view.user = user;
io.to(room).emit("user.presence", {
userId: user.id,
documentId: event.documentId,
isEditing: event.isEditing,
});
}
});
},
});
server.on("error", (err) => {
throw err;
});
server.on("listening", () => {
const address = server.address();
console.log(`\n> Listening on http://localhost:${address.port}\n`);
});
export async function start(id: string) {
console.log(`Started worker ${id}`);
await checkMigrations();
server.listen(process.env.PORT || "3000");
}
export const socketio = io;
export default server;

View File

@@ -1,5 +1,5 @@
 // @flow
-import events from "../events";
+import { globalEventQueue } from "../queues";
 import { DataTypes, sequelize } from "../sequelize";
 const Event = sequelize.define("event", {
@@ -45,13 +45,13 @@ Event.beforeCreate((event) => {
 });
 Event.afterCreate((event) => {
-  events.add(event, { removeOnComplete: true });
+  globalEventQueue.add(event, { removeOnComplete: true });
 });
 // add can be used to send events into the event system without recording them
 // in the database / audit trail
 Event.add = (event) => {
-  events.add(Event.build(event), { removeOnComplete: true });
+  globalEventQueue.add(Event.build(event), { removeOnComplete: true });
 };
 Event.ACTIVITY_EVENTS = [
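The net effect is that every persisted event doubles as a queue job. A small illustration of the two entry points (field values made up):

```js
// Written to the events table AND pushed onto globalEventQueue by the
// afterCreate hook above.
await Event.create({
  name: "documents.publish",
  documentId: document.id,
  collectionId: document.collectionId,
  teamId: user.teamId,
  actorId: user.id,
  data: { title: document.title },
  ip: ctx.request.ip,
});

// Queued only — nothing is recorded in the database / audit trail.
Event.add({ ...event, name: "documents.update.delayed" });
```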

server/presenters/env.js Normal file
View File

@@ -0,0 +1,20 @@
// @flow
// Note: This entire object is stringified in the HTML exposed to the client
// do not add anything here that should be a secret or password
export default function present(env: Object): Object {
return {
URL: env.URL,
CDN_URL: env.CDN_URL || "",
DEPLOYMENT: env.DEPLOYMENT,
ENVIRONMENT: env.NODE_ENV,
SENTRY_DSN: env.SENTRY_DSN,
TEAM_LOGO: env.TEAM_LOGO,
SLACK_KEY: env.SLACK_KEY,
SLACK_APP_ID: env.SLACK_APP_ID,
MAXIMUM_IMPORT_SIZE: env.MAXIMUM_IMPORT_SIZE || 1024 * 1000 * 5,
SUBDOMAINS_ENABLED: env.SUBDOMAINS_ENABLED === "true",
GOOGLE_ANALYTICS_ID: env.GOOGLE_ANALYTICS_ID,
RELEASE: env.SOURCE_COMMIT || env.SOURCE_VERSION || undefined,
};
}
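Moving the whitelist into a presenter means it is applied where the HTML page is rendered rather than at module import time. A sketch of the intended call site — the exact render path isn't part of this excerpt, so treat this as an assumption:

```js
import env from "./env";
import present from "./presenters/env";

// Only the whitelisted, non-secret keys end up stringified into the page,
// e.g. as window.env in the HTML shell.
const clientEnv = present(env);
const script = `window.env = ${JSON.stringify(clientEnv)};`;
```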

server/queues/index.js Normal file
View File

@@ -0,0 +1,7 @@
// @flow
import { createQueue } from "../utils/queue";
export const globalEventQueue = createQueue("globalEvents");
export const processorEventQueue = createQueue("processorEvents");
export const websocketsQueue = createQueue("websockets");
export const emailsQueue = createQueue("emails");
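The actual worker wiring isn't shown in this excerpt, but modeled on the fan-out in the removed `server/events.js`, a worker service could bind handlers along these lines (the `processors` index import is hypothetical):

```js
import { globalEventQueue, processorEventQueue } from "./queues";
import processors from "./queues/processors"; // hypothetical index module

// Fan each global event out into one job per processor...
globalEventQueue.process(async (job) => {
  Object.keys(processors).forEach((name) => {
    processorEventQueue.add(
      { ...job.data, processor: name },
      { removeOnComplete: true }
    );
  });
});

// ...then hand each processor job to that processor's on(event) handler.
processorEventQueue.process(async (job) => {
  const Processor = processors[job.data.processor];
  await new Processor().on(job.data);
});
```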

View File

@@ -1,11 +1,11 @@
 // @flow
-import type { DocumentEvent, RevisionEvent } from "../events";
-import { Document, Backlink } from "../models";
-import { Op } from "../sequelize";
-import parseDocumentIds from "../utils/parseDocumentIds";
-import slugify from "../utils/slugify";
-export default class Backlinks {
+import { Document, Backlink } from "../../models";
+import { Op } from "../../sequelize";
+import type { DocumentEvent, RevisionEvent } from "../../types";
+import parseDocumentIds from "../../utils/parseDocumentIds";
+import slugify from "../../utils/slugify";
+export default class BacklinksProcessor {
   async on(event: DocumentEvent | RevisionEvent) {
     switch (event.name) {
       case "documents.publish": {

View File

@@ -1,7 +1,7 @@
 /* eslint-disable flowtype/require-valid-file-annotation */
-import { Backlink } from "../models";
-import { buildDocument } from "../test/factories";
-import { flushdb } from "../test/support";
+import { Backlink } from "../../models";
+import { buildDocument } from "../../test/factories";
+import { flushdb } from "../../test/support";
 import BacklinksService from "./backlinks";
 const Backlinks = new BacklinksService();

View File

@@ -1,12 +1,13 @@
// @flow // @flow
import events, { type Event } from "../events"; import { Document } from "../../models";
import { Document } from "../models"; import { globalEventQueue } from "../../queues";
import type { Event } from "../../types";
export default class Debouncer { export default class DebounceProcessor {
async on(event: Event) { async on(event: Event) {
switch (event.name) { switch (event.name) {
case "documents.update": { case "documents.update": {
events.add( globalEventQueue.add(
{ {
...event, ...event,
name: "documents.update.delayed", name: "documents.update.delayed",
@@ -29,7 +30,7 @@ export default class Debouncer {
// this functions as a simple distributed debounce. // this functions as a simple distributed debounce.
if (document.updatedAt > new Date(event.createdAt)) return; if (document.updatedAt > new Date(event.createdAt)) return;
events.add( globalEventQueue.add(
{ {
...event, ...event,
name: "documents.update.debounced", name: "documents.update.debounced",

View File

@@ -0,0 +1,14 @@
// @flow
import mailer, { type EmailSendOptions, type EmailTypes } from "../../mailer";
type EmailEvent = {
type: EmailTypes,
opts: EmailSendOptions,
};
export default class EmailsProcessor {
async on(event: EmailEvent) {
// $FlowIssue flow rightly doesn't like dynamic values
await mailer[event.type](event.opts);
}
}
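
A producer for this processor enqueues a job whose type names a mailer method, since the dispatch is mailer[event.type](event.opts). Driving the processor directly, as the worker's emailsQueue handler does; the "welcome" method name and opts shape are assumed for illustration:

// @flow
import EmailsProcessor from "./emails";

const processor = new EmailsProcessor();
processor.on({ type: "welcome", opts: { to: "user@example.com" } });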

View File

@@ -2,11 +2,11 @@
import fs from "fs"; import fs from "fs";
import os from "os"; import os from "os";
import File from "formidable/lib/file"; import File from "formidable/lib/file";
import collectionImporter from "../commands/collectionImporter"; import collectionImporter from "../../commands/collectionImporter";
import type { Event } from "../events"; import { Attachment, User } from "../../models";
import { Attachment, User } from "../models"; import type { Event } from "../../types";
export default class Importer { export default class ImportsProcessor {
async on(event: Event) { async on(event: Event) {
switch (event.name) { switch (event.name) {
case "collections.import": { case "collections.import": {

View File

@@ -1,7 +1,6 @@
// @flow // @flow
import debug from "debug"; import debug from "debug";
import type { DocumentEvent, CollectionEvent, Event } from "../events"; import mailer from "../../mailer";
import mailer from "../mailer";
import { import {
View, View,
Document, Document,
@@ -9,12 +8,13 @@ import {
Collection, Collection,
User, User,
NotificationSetting, NotificationSetting,
} from "../models"; } from "../../models";
import { Op } from "../sequelize"; import { Op } from "../../sequelize";
import type { DocumentEvent, CollectionEvent, Event } from "../../types";
const log = debug("services"); const log = debug("services");
export default class Notifications { export default class NotificationsProcessor {
async on(event: Event) { async on(event: Event) {
switch (event.name) { switch (event.name) {
case "documents.publish": case "documents.publish":

View File

@@ -1,11 +1,15 @@
/* eslint-disable flowtype/require-valid-file-annotation */ /* eslint-disable flowtype/require-valid-file-annotation */
import mailer from "../mailer"; import mailer from "../../mailer";
import { View, NotificationSetting } from "../models"; import { View, NotificationSetting } from "../../models";
import { buildDocument, buildCollection, buildUser } from "../test/factories"; import {
import { flushdb } from "../test/support"; buildDocument,
buildCollection,
buildUser,
} from "../../test/factories";
import { flushdb } from "../../test/support";
import NotificationsService from "./notifications"; import NotificationsService from "./notifications";
jest.mock("../mailer"); jest.mock("../../mailer");
const Notifications = new NotificationsService(); const Notifications = new NotificationsService();

View File

@@ -1,10 +1,10 @@
// @flow // @flow
import invariant from "invariant"; import invariant from "invariant";
import revisionCreator from "../commands/revisionCreator"; import revisionCreator from "../../commands/revisionCreator";
import type { DocumentEvent, RevisionEvent } from "../events"; import { Revision, Document, User } from "../../models";
import { Revision, Document, User } from "../models"; import type { DocumentEvent, RevisionEvent } from "../../types";
export default class Revisions { export default class RevisionsProcessor {
async on(event: DocumentEvent | RevisionEvent) { async on(event: DocumentEvent | RevisionEvent) {
switch (event.name) { switch (event.name) {
case "documents.publish": case "documents.publish":

View File

@@ -1,7 +1,7 @@
/* eslint-disable flowtype/require-valid-file-annotation */ /* eslint-disable flowtype/require-valid-file-annotation */
import { Revision } from "../models"; import { Revision } from "../../models";
import { buildDocument } from "../test/factories"; import { buildDocument } from "../../test/factories";
import { flushdb } from "../test/support"; import { flushdb } from "../../test/support";
import RevisionsService from "./revisions"; import RevisionsService from "./revisions";
const Revisions = new RevisionsService(); const Revisions = new RevisionsService();

View File

@@ -1,10 +1,10 @@
// @flow // @flow
import fetch from "fetch-with-proxy"; import fetch from "fetch-with-proxy";
import type { DocumentEvent, IntegrationEvent, Event } from "../events"; import { Document, Integration, Collection, Team } from "../../models";
import { Document, Integration, Collection, Team } from "../models"; import { presentSlackAttachment } from "../../presenters";
import { presentSlackAttachment } from "../presenters"; import type { DocumentEvent, IntegrationEvent, Event } from "../../types";
export default class Slack { export default class SlackProcessor {
async on(event: Event) { async on(event: Event) {
switch (event.name) { switch (event.name) {
case "documents.publish": case "documents.publish":

View File

@@ -0,0 +1,498 @@
// @flow
import { subHours } from "date-fns";
import {
Document,
Collection,
Group,
CollectionGroup,
GroupUser,
} from "../../models";
import { Op } from "../../sequelize";
import type { Event } from "../../types";
export default class WebsocketsProcessor {
async on(event: Event, socketio: any) {
switch (event.name) {
case "documents.publish":
case "documents.restore":
case "documents.archive":
case "documents.unarchive": {
const document = await Document.findByPk(event.documentId, {
paranoid: false,
});
const channel = document.publishedAt
? `collection-${document.collectionId}`
: `user-${event.actorId}`;
return socketio.to(channel).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.delete": {
const document = await Document.findByPk(event.documentId, {
paranoid: false,
});
if (!document.publishedAt) {
return socketio.to(`user-${document.createdById}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
}
return socketio
.to(`collection-${document.collectionId}`)
.emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.permanent_delete": {
return socketio
.to(`collection-${event.collectionId}`)
.emit(event.name, {
documentId: event.documentId,
});
}
case "documents.pin":
case "documents.unpin":
case "documents.update": {
const document = await Document.findByPk(event.documentId, {
paranoid: false,
});
const channel = document.publishedAt
? `collection-${document.collectionId}`
: `user-${event.actorId}`;
return socketio.to(channel).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
}
case "documents.create": {
const document = await Document.findByPk(event.documentId);
return socketio.to(`user-${event.actorId}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.star":
case "documents.unstar": {
return socketio.to(`user-${event.actorId}`).emit(event.name, {
documentId: event.documentId,
});
}
case "documents.move": {
const documents = await Document.findAll({
where: {
id: event.data.documentIds,
},
paranoid: false,
});
documents.forEach((document) => {
socketio.to(`collection-${document.collectionId}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
});
event.data.collectionIds.forEach((collectionId) => {
socketio.to(`collection-${collectionId}`).emit("entities", {
event: event.name,
collectionIds: [{ id: collectionId }],
});
});
return;
}
case "collections.create": {
const collection = await Collection.findByPk(event.collectionId, {
paranoid: false,
});
socketio
.to(
collection.permission
? `team-${collection.teamId}`
: `collection-${collection.id}`
)
.emit("entities", {
event: event.name,
collectionIds: [
{
id: collection.id,
updatedAt: collection.updatedAt,
},
],
});
return socketio
.to(
collection.permission
? `team-${collection.teamId}`
: `collection-${collection.id}`
)
.emit("join", {
event: event.name,
collectionId: collection.id,
});
}
case "collections.update":
case "collections.delete": {
const collection = await Collection.findByPk(event.collectionId, {
paranoid: false,
});
return socketio.to(`team-${collection.teamId}`).emit("entities", {
event: event.name,
collectionIds: [
{
id: collection.id,
updatedAt: collection.updatedAt,
},
],
});
}
case "collections.move": {
return socketio
.to(`collection-${event.collectionId}`)
.emit("collections.update_index", {
collectionId: event.collectionId,
index: event.data.index,
});
}
case "collections.add_user": {
// the user being added isn't yet in the websocket channel for the collection
// so they need to be notified separately
socketio.to(`user-${event.userId}`).emit(event.name, {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// let everyone with access to the collection know a user was added
socketio.to(`collection-${event.collectionId}`).emit(event.name, {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
return socketio.to(`user-${event.userId}`).emit("join", {
event: event.name,
collectionId: event.collectionId,
});
}
case "collections.remove_user": {
const membershipUserIds = await Collection.membershipUserIds(
event.collectionId
);
if (membershipUserIds.includes(event.userId)) {
// Even though we just removed a user from the collection
// the user still has access through some means
// treat this like an add, so that the client re-syncs policies
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: "collections.add_user",
userId: event.userId,
collectionId: event.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${event.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("leave", {
event: event.name,
collectionId: event.collectionId,
});
}
return;
}
case "collections.add_group": {
const group = await Group.findByPk(event.data.groupId);
// the users being added are not yet in the websocket channel for the collection
// so they need to be notified separately
for (const groupMembership of group.groupMemberships) {
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
socketio.to(`user-${groupMembership.userId}`).emit("join", {
event: event.name,
collectionId: event.collectionId,
});
}
return;
}
case "collections.remove_group": {
const group = await Group.findByPk(event.data.groupId);
const membershipUserIds = await Collection.membershipUserIds(
event.collectionId
);
for (const groupMembership of group.groupMemberships) {
if (membershipUserIds.includes(groupMembership.userId)) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
} else {
// let users in the channel know they were removed
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.remove_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${groupMembership.userId}`).emit("leave", {
event: event.name,
collectionId: event.collectionId,
});
}
}
return;
}
case "groups.create":
case "groups.update": {
const group = await Group.findByPk(event.modelId, {
paranoid: false,
});
return socketio.to(`team-${group.teamId}`).emit("entities", {
event: event.name,
groupIds: [
{
id: group.id,
updatedAt: group.updatedAt,
},
],
});
}
case "groups.add_user": {
// do an add user for every collection that the group is a part of
const collectionGroupMemberships = await CollectionGroup.findAll({
where: { groupId: event.modelId },
});
for (const collectionGroup of collectionGroupMemberships) {
// the user being added isn't yet in the websocket channel for the collection
// so they need to be notified separately
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// let everyone with access to the collection know a user was added
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("join", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
return;
}
case "groups.remove_user": {
const collectionGroupMemberships = await CollectionGroup.findAll({
where: { groupId: event.modelId },
});
for (const collectionGroup of collectionGroupMemberships) {
// if the user has any memberships remaining on the collection
// we need to emit add instead of remove
const collection = await Collection.scope({
method: ["withMembership", event.userId],
}).findByPk(collectionGroup.collectionId);
if (!collection) {
continue;
}
const hasMemberships =
collection.memberships.length > 0 ||
collection.collectionGroupMemberships.length > 0;
if (hasMemberships) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("leave", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
}
return;
}
case "groups.delete": {
const group = await Group.findByPk(event.modelId, {
paranoid: false,
});
socketio.to(`team-${group.teamId}`).emit("entities", {
event: event.name,
groupIds: [
{
id: group.id,
updatedAt: group.updatedAt,
},
],
});
// we need to find the users and collection relations that were just severed as a result of the group deletion
// since there are cascading deletes, we approximate this by looking for the recently deleted
// items in the GroupUser and CollectionGroup tables
const groupUsers = await GroupUser.findAll({
paranoid: false,
where: {
groupId: event.modelId,
deletedAt: {
[Op.gt]: subHours(new Date(), 1),
},
},
});
const collectionGroupMemberships = await CollectionGroup.findAll({
paranoid: false,
where: {
groupId: event.modelId,
deletedAt: {
[Op.gt]: subHours(new Date(), 1),
},
},
});
for (const collectionGroup of collectionGroupMemberships) {
const membershipUserIds = await Collection.membershipUserIds(
collectionGroup.collectionId
);
for (const groupUser of groupUsers) {
if (membershipUserIds.includes(groupUser.userId)) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio
.to(`user-${groupUser.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupUser.userId,
collectionId: collectionGroup.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: groupUser.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${groupUser.userId}`).emit("leave", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
}
}
return;
}
default:
}
}
}
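
On the receiving end, clients subscribe to these emissions; the "entities" payload deliberately carries only ids and updatedAt stamps, so clients re-fetch whatever is stale instead of receiving full models over the wire. A sketch with the socket.io client API (the frontend wiring here is assumed, not part of this commit):

// @flow
import io from "socket.io-client";

const socket = io(window.location.origin, { path: "/realtime" });

socket.on("entities", ({ event, documentIds = [], collectionIds = [] }) => {
  // compare updatedAt stamps against local state and re-fetch stale records
  console.log(event, documentIds, collectionIds);
});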

View File

@@ -11,6 +11,7 @@ import { languages } from "../shared/i18n";
import env from "./env"; import env from "./env";
import apexRedirect from "./middlewares/apexRedirect"; import apexRedirect from "./middlewares/apexRedirect";
import Share from "./models/Share"; import Share from "./models/Share";
import presentEnv from "./presenters/env";
import { opensearchResponse } from "./utils/opensearch"; import { opensearchResponse } from "./utils/opensearch";
import prefetchTags from "./utils/prefetchTags"; import prefetchTags from "./utils/prefetchTags";
import { robotsResponse } from "./utils/robots"; import { robotsResponse } from "./utils/robots";
@@ -52,7 +53,7 @@ const renderApp = async (ctx, next, title = "Outline") => {
const page = await readIndexFile(ctx); const page = await readIndexFile(ctx);
const environment = ` const environment = `
window.env = ${JSON.stringify(env)}; window.env = ${JSON.stringify(presentEnv(env))};
`; `;
ctx.body = page ctx.body = page
.toString() .toString()

View File

@@ -1,9 +1,10 @@
// @flow // @flow
import TestServer from "fetch-test-server"; import TestServer from "fetch-test-server";
import app from "./app"; import webService from "./services/web";
import { buildShare, buildDocument } from "./test/factories"; import { buildShare, buildDocument } from "./test/factories";
import { flushdb } from "./test/support"; import { flushdb } from "./test/support";
const app = webService();
const server = new TestServer(app.callback()); const server = new TestServer(app.callback());
beforeEach(() => flushdb()); beforeEach(() => flushdb());

21
server/sentry.js Normal file
View File

@@ -0,0 +1,21 @@
// @flow
import * as Sentry from "@sentry/node";
import env from "./env";
if (env.SENTRY_DSN) {
Sentry.init({
dsn: env.SENTRY_DSN,
environment: env.ENVIRONMENT,
release: env.RELEASE,
maxBreadcrumbs: 0,
ignoreErrors: [
// emitted by Koa when bots attempt to snoop on paths such as wp-admin
// or when the user client submits a bad request. These are expected in normal
// running of the application and don't need to be reported.
"BadRequestError",
"UnauthorizedError",
],
});
}
export default Sentry;

View File

@@ -1,18 +1,6 @@
// @flow // @flow
import debug from "debug"; import web from "./web";
import { requireDirectory } from "../utils/fs"; import websockets from "./websockets";
import worker from "./worker";
const log = debug("services"); export default { web, websockets, worker };
const services = {};
if (!process.env.SINGLE_RUN) {
requireDirectory(__dirname).forEach(([module, name]) => {
if (module && module.default) {
const Service = module.default;
services[name] = new Service();
log(`loaded ${name} service`);
}
});
}
export default services;
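
Each entry in this map is an init(app, server) function, and the new Procfile selects among them with the --services flag. A sketch of how an entry point might resolve that flag (the parsing below is illustrative, not the commit's exact code):

// @flow
import services from "./services";

const flag = process.argv.find((arg) => arg.startsWith("--services="));
const names = (flag ? flag.split("=")[1] : "web,websockets,worker").split(",");

names.forEach((name) => {
  if (!services[name]) {
    throw new Error(`Unknown service: ${name}`);
  }
});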

174
server/services/web.js Normal file
View File

@@ -0,0 +1,174 @@
// @flow
import http from "http";
import Koa from "koa";
import {
contentSecurityPolicy,
dnsPrefetchControl,
referrerPolicy,
} from "koa-helmet";
import mount from "koa-mount";
import onerror from "koa-onerror";
import enforceHttps from "koa-sslify";
import api from "../api";
import auth from "../auth";
import emails from "../emails";
import env from "../env";
import routes from "../routes";
import Sentry from "../sentry";
const isProduction = env.NODE_ENV === "production";
const isTest = env.NODE_ENV === "test";
// Construct scripts CSP based on services in use by this installation
const defaultSrc = ["'self'"];
const scriptSrc = [
"'self'",
"'unsafe-inline'",
"'unsafe-eval'",
"gist.github.com",
];
if (env.GOOGLE_ANALYTICS_ID) {
scriptSrc.push("www.google-analytics.com");
}
if (env.CDN_URL) {
scriptSrc.push(env.CDN_URL);
defaultSrc.push(env.CDN_URL);
}
export default function init(app: Koa = new Koa(), server?: http.Server): Koa {
if (isProduction) {
// Force redirect to HTTPS protocol unless explicitly disabled
if (process.env.FORCE_HTTPS !== "false") {
app.use(
enforceHttps({
trustProtoHeader: true,
})
);
} else {
console.warn("Enforced https was disabled with FORCE_HTTPS env variable");
}
// trust header fields set by our proxy, e.g. X-Forwarded-For
app.proxy = true;
} else if (!isTest) {
/* eslint-disable global-require */
const convert = require("koa-convert");
const webpack = require("webpack");
const devMiddleware = require("koa-webpack-dev-middleware");
const hotMiddleware = require("koa-webpack-hot-middleware");
const config = require("../../webpack.config.dev");
const compile = webpack(config);
/* eslint-enable global-require */
const middleware = devMiddleware(compile, {
// display no info to console (only warnings and errors)
noInfo: true,
// do not silence the console entirely
quiet: false,
watchOptions: {
poll: 1000,
ignored: ["node_modules", "flow-typed", "server", "build", "__mocks__"],
},
// public path to bind the middleware to
// use the same as in webpack
publicPath: config.output.publicPath,
// options for formatting the statistics
stats: {
colors: true,
},
});
app.use(async (ctx, next) => {
ctx.webpackConfig = config;
ctx.devMiddleware = middleware;
await next();
});
app.use(convert(middleware));
app.use(
convert(
hotMiddleware(compile, {
log: console.log, // eslint-disable-line
path: "/__webpack_hmr",
heartbeat: 10 * 1000,
})
)
);
app.use(mount("/emails", emails));
}
// catch errors in one place, automatically set status and response headers
onerror(app);
app.on("error", (error, ctx) => {
// we don't need to report to the bug tracker every time a request is aborted
if (error.code === "EPIPE" || error.code === "ECONNRESET") {
console.warn("Connection error", { error });
return;
}
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
const requestId = ctx.headers["x-request-id"];
if (requestId) {
scope.setTag("request_id", requestId);
}
const authType = ctx.state ? ctx.state.authType : undefined;
if (authType) {
scope.setTag("auth_type", authType);
}
const userId =
ctx.state && ctx.state.user ? ctx.state.user.id : undefined;
if (userId) {
scope.setUser({ id: userId });
}
scope.addEventProcessor(function (event) {
return Sentry.Handlers.parseRequest(event, ctx.request);
});
Sentry.captureException(error);
});
} else {
console.error(error);
}
});
app.use(mount("/auth", auth));
app.use(mount("/api", api));
// Sets common security headers by default, such as no-sniff, hsts, and hidden
// powered-by. These are applied after auth and api so they are only returned
// on standard, non-XHR routes
app.use(async (ctx, next) => {
ctx.set("Permissions-Policy", "interest-cohort=()");
await next();
});
app.use(
contentSecurityPolicy({
directives: {
defaultSrc,
scriptSrc,
styleSrc: ["'self'", "'unsafe-inline'", "github.githubassets.com"],
imgSrc: ["*", "data:", "blob:"],
frameSrc: ["*"],
connectSrc: ["*"],
// Do not use connect-src: 'self' because self + websockets does not work in
// Safari, ref: https://bugs.webkit.org/show_bug.cgi?id=201591
},
})
);
// Allow DNS prefetching for performance, we do not care about leaking requests
// to our own CDNs
app.use(dnsPrefetchControl({ allow: true }));
app.use(referrerPolicy({ policy: "no-referrer" }));
app.use(mount(routes));
return app;
}
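
Standing the web service up on its own is then a few lines; this mirrors what the test suite does with webService() and what the entry point does with server.listen. A hedged sketch:

// @flow
import http from "http";
import webService from "./services/web";

const app = webService();
const server = http.createServer(app.callback());
server.listen(process.env.PORT || "3000");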

View File

@@ -1,503 +1,242 @@
// @flow // @flow
import { subHours } from "date-fns"; import http from "http";
import type { Event } from "../events"; import Koa from "koa";
import { socketio } from "../main"; import IO from "socket.io";
import { import socketRedisAdapter from "socket.io-redis";
Document, import SocketAuth from "socketio-auth";
Collection, import env from "../env";
Group, import { Document, Collection, View } from "../models";
CollectionGroup, import policy from "../policies";
GroupUser, import { websocketsQueue } from "../queues";
} from "../models"; import WebsocketsProcessor from "../queues/processors/websockets";
import { Op } from "../sequelize"; import { client, subscriber } from "../redis";
import Sentry from "../sentry";
import { getUserForJWT } from "../utils/jwt";
import * as metrics from "../utils/metrics";
export default class Websockets { const { can } = policy;
async on(event: Event) { const websockets = new WebsocketsProcessor();
if (!socketio) {
return; export default function init(app: Koa, server: http.Server) {
const io = IO(server, {
path: "/realtime",
serveClient: false,
cookie: false,
});
io.adapter(
socketRedisAdapter({
pubClient: client,
subClient: subscriber,
})
);
io.origins((_, callback) => {
callback(null, true);
});
io.of("/").adapter.on("error", (err) => {
if (err.name === "MaxRetriesPerRequestError") {
console.error(`Redis error: ${err.message}. Shutting down now.`);
throw err;
} else {
console.error(`Redis error: ${err.message}`);
} }
});
switch (event.name) { io.on("connection", (socket) => {
case "documents.publish": metrics.increment("websockets.connected");
case "documents.restore": metrics.gaugePerInstance(
case "documents.archive": "websockets.count",
case "documents.unarchive": { socket.client.conn.server.clientsCount
const document = await Document.findByPk(event.documentId, { );
paranoid: false,
});
const channel = document.publishedAt socket.on("disconnect", () => {
? `collection-${document.collectionId}` metrics.increment("websockets.disconnected");
: `user-${event.actorId}`; metrics.gaugePerInstance(
"websockets.count",
socket.client.conn.server.clientsCount
);
});
});
return socketio.to(channel).emit("entities", { SocketAuth(io, {
event: event.name, authenticate: async (socket, data, callback) => {
documentIds: [ const { token } = data;
{
id: document.id, try {
updatedAt: document.updatedAt, const user = await getUserForJWT(token);
}, socket.client.user = user;
],
collectionIds: [ // store the mapping between socket id and user id in redis
{ // so that it is accessible across multiple server nodes
id: document.collectionId, await client.hset(socket.id, "userId", user.id);
},
], return callback(null, true);
}); } catch (err) {
return callback(err);
} }
case "documents.delete": { },
const document = await Document.findByPk(event.documentId, { postAuthenticate: async (socket, data) => {
paranoid: false, const { user } = socket.client;
});
if (!document.publishedAt) { // the rooms associated with the current team
return socketio.to(`user-${document.createdById}`).emit("entities", { // and user so we can send authenticated events
event: event.name, let rooms = [`team-${user.teamId}`, `user-${user.id}`];
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
}
return socketio // the rooms associated with collections this user
.to(`collection-${document.collectionId}`) // has access to on connection. New collection subscriptions
.emit("entities", { // are managed from the client as needed through the 'join' event
event: event.name, const collectionIds = await user.collectionIds();
documentIds: [ collectionIds.forEach((collectionId) =>
{ rooms.push(`collection-${collectionId}`)
id: document.id, );
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.permanent_delete": {
return socketio
.to(`collection-${event.collectionId}`)
.emit(event.name, {
documentId: event.documentId,
});
}
case "documents.pin":
case "documents.unpin":
case "documents.update": {
const document = await Document.findByPk(event.documentId, {
paranoid: false,
});
const channel = document.publishedAt // join all of the rooms at once
? `collection-${document.collectionId}` socket.join(rooms);
: `user-${event.actorId}`;
return socketio.to(channel).emit("entities", { // allow the client to request to join rooms
event: event.name, socket.on("join", async (event) => {
documentIds: [ // user is joining a collection channel, because their permissions have
{ // changed, granting them access.
id: document.id, if (event.collectionId) {
updatedAt: document.updatedAt,
},
],
});
}
case "documents.create": {
const document = await Document.findByPk(event.documentId);
return socketio.to(`user-${event.actorId}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
collectionIds: [
{
id: document.collectionId,
},
],
});
}
case "documents.star":
case "documents.unstar": {
return socketio.to(`user-${event.actorId}`).emit(event.name, {
documentId: event.documentId,
});
}
case "documents.move": {
const documents = await Document.findAll({
where: {
id: event.data.documentIds,
},
paranoid: false,
});
documents.forEach((document) => {
socketio.to(`collection-${document.collectionId}`).emit("entities", {
event: event.name,
documentIds: [
{
id: document.id,
updatedAt: document.updatedAt,
},
],
});
});
event.data.collectionIds.forEach((collectionId) => {
socketio.to(`collection-${collectionId}`).emit("entities", {
event: event.name,
collectionIds: [{ id: collectionId }],
});
});
return;
}
case "collections.create": {
const collection = await Collection.findByPk(event.collectionId, {
paranoid: false,
});
socketio
.to(
collection.permission
? `team-${collection.teamId}`
: `collection-${collection.id}`
)
.emit("entities", {
event: event.name,
collectionIds: [
{
id: collection.id,
updatedAt: collection.updatedAt,
},
],
});
return socketio
.to(
collection.permission
? `team-${collection.teamId}`
: `collection-${collection.id}`
)
.emit("join", {
event: event.name,
collectionId: collection.id,
});
}
case "collections.update":
case "collections.delete": {
const collection = await Collection.findByPk(event.collectionId, {
paranoid: false,
});
return socketio.to(`team-${collection.teamId}`).emit("entities", {
event: event.name,
collectionIds: [
{
id: collection.id,
updatedAt: collection.updatedAt,
},
],
});
}
case "collections.move": {
return socketio
.to(`collection-${event.collectionId}`)
.emit("collections.update_index", {
collectionId: event.collectionId,
index: event.data.index,
});
}
case "collections.add_user": {
// the user being added isn't yet in the websocket channel for the collection
// so they need to be notified separately
socketio.to(`user-${event.userId}`).emit(event.name, {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// let everyone with access to the collection know a user was added
socketio.to(`collection-${event.collectionId}`).emit(event.name, {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
return socketio.to(`user-${event.userId}`).emit("join", {
event: event.name,
collectionId: event.collectionId,
});
}
case "collections.remove_user": {
const membershipUserIds = await Collection.membershipUserIds(
event.collectionId
);
if (membershipUserIds.includes(event.userId)) {
// Even though we just removed a user from the collection
// the user still has access through some means
// treat this like an add, so that the client re-syncs policies
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: "collections.add_user",
userId: event.userId,
collectionId: event.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${event.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: event.userId,
collectionId: event.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("leave", {
event: event.name,
collectionId: event.collectionId,
});
}
return;
}
case "collections.add_group": {
const group = await Group.findByPk(event.data.groupId);
// the users being added are not yet in the websocket channel for the collection
// so they need to be notified separately
for (const groupMembership of group.groupMemberships) {
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
socketio.to(`user-${groupMembership.userId}`).emit("join", {
event: event.name,
collectionId: event.collectionId,
});
}
return;
}
case "collections.remove_group": {
const group = await Group.findByPk(event.data.groupId);
const membershipUserIds = await Collection.membershipUserIds(
event.collectionId
);
for (const groupMembership of group.groupMemberships) {
if (membershipUserIds.includes(groupMembership.userId)) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
} else {
// let users in the channel know they were removed
socketio
.to(`user-${groupMembership.userId}`)
.emit("collections.remove_user", {
event: event.name,
userId: groupMembership.userId,
collectionId: event.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${groupMembership.userId}`).emit("leave", {
event: event.name,
collectionId: event.collectionId,
});
}
}
return;
}
case "groups.create":
case "groups.update": {
const group = await Group.findByPk(event.modelId, {
paranoid: false,
});
return socketio.to(`team-${group.teamId}`).emit("entities", {
event: event.name,
groupIds: [
{
id: group.id,
updatedAt: group.updatedAt,
},
],
});
}
case "groups.add_user": {
// do an add user for every collection that the group is a part of
const collectionGroupMemberships = await CollectionGroup.findAll({
where: { groupId: event.modelId },
});
for (const collectionGroup of collectionGroupMemberships) {
// the user being added isn't yet in the websocket channel for the collection
// so they need to be notified separately
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// let everyone with access to the collection know a user was added
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to connect to the websocket channel for the collection
return socketio.to(`user-${event.userId}`).emit("join", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
return;
}
case "groups.remove_user": {
const collectionGroupMemberships = await CollectionGroup.findAll({
where: { groupId: event.modelId },
});
for (const collectionGroup of collectionGroupMemberships) {
// if the user has any memberships remaining on the collection
// we need to emit add instead of remove
const collection = await Collection.scope({ const collection = await Collection.scope({
method: ["withMembership", event.userId], method: ["withMembership", user.id],
}).findByPk(collectionGroup.collectionId); }).findByPk(event.collectionId);
if (!collection) { if (can(user, "read", collection)) {
continue; socket.join(`collection-${event.collectionId}`, () => {
} metrics.increment("websockets.collections.join");
const hasMemberships =
collection.memberships.length > 0 ||
collection.collectionGroupMemberships.length > 0;
if (hasMemberships) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
socketio.to(`user-${event.userId}`).emit("collections.add_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: event.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${event.userId}`).emit("leave", {
event: event.name,
collectionId: collectionGroup.collectionId,
}); });
} }
} }
return;
}
case "groups.delete": {
const group = await Group.findByPk(event.modelId, {
paranoid: false,
});
socketio.to(`team-${group.teamId}`).emit("entities", { // user is joining a document channel, because they have navigated to
event: event.name, // view a document.
groupIds: [ if (event.documentId) {
{ const document = await Document.findByPk(event.documentId, {
id: group.id, userId: user.id,
updatedAt: group.updatedAt, });
},
],
});
// we need to find the users and collection relations that were just severed as a result of the group deletion if (can(user, "read", document)) {
// since there are cascading deletes, we approximate this by looking for the recently deleted const room = `document-${event.documentId}`;
// items in the GroupUser and CollectionGroup tables
const groupUsers = await GroupUser.findAll({
paranoid: false,
where: {
groupId: event.modelId,
deletedAt: {
[Op.gt]: subHours(new Date(), 1),
},
},
});
const collectionGroupMemberships = await CollectionGroup.findAll({ await View.touch(event.documentId, user.id, event.isEditing);
paranoid: false, const editing = await View.findRecentlyEditingByDocument(
where: { event.documentId
groupId: event.modelId, );
deletedAt: {
[Op.gt]: subHours(new Date(), 1),
},
},
});
for (const collectionGroup of collectionGroupMemberships) { socket.join(room, () => {
const membershipUserIds = await Collection.membershipUserIds( metrics.increment("websockets.documents.join");
collectionGroup.collectionId
// let everyone else in the room know that a new user joined
io.to(room).emit("user.join", {
userId: user.id,
documentId: event.documentId,
isEditing: event.isEditing,
});
// let this user know who else is already present in the room
io.in(room).clients(async (err, sockets) => {
if (err) {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("clients", sockets);
Sentry.captureException(err);
});
} else {
console.error(err);
}
return;
}
// because a single user can have multiple socket connections we
// need to make sure that only unique userIds are returned. A Map
// makes this easy.
let userIds = new Map();
for (const socketId of sockets) {
const userId = await client.hget(socketId, "userId");
userIds.set(userId, userId);
}
socket.emit("document.presence", {
documentId: event.documentId,
userIds: Array.from(userIds.keys()),
editingIds: editing.map((view) => view.userId),
});
});
});
}
}
});
// allow the client to request to leave rooms
socket.on("leave", (event) => {
if (event.collectionId) {
socket.leave(`collection-${event.collectionId}`, () => {
metrics.increment("websockets.collections.leave");
});
}
if (event.documentId) {
const room = `document-${event.documentId}`;
socket.leave(room, () => {
metrics.increment("websockets.documents.leave");
io.to(room).emit("user.leave", {
userId: user.id,
documentId: event.documentId,
});
});
}
});
socket.on("disconnecting", () => {
const rooms = Object.keys(socket.rooms);
rooms.forEach((room) => {
if (room.startsWith("document-")) {
const documentId = room.replace("document-", "");
io.to(room).emit("user.leave", {
userId: user.id,
documentId,
});
}
});
});
socket.on("presence", async (event) => {
metrics.increment("websockets.presence");
const room = `document-${event.documentId}`;
if (event.documentId && socket.rooms[room]) {
const view = await View.touch(
event.documentId,
user.id,
event.isEditing
); );
view.user = user;
for (const groupUser of groupUsers) { io.to(room).emit("user.presence", {
if (membershipUserIds.includes(groupUser.userId)) { userId: user.id,
// the user still has access through some means... documentId: event.documentId,
// treat this like an add, so that the client re-syncs policies isEditing: event.isEditing,
socketio });
.to(`user-${groupUser.userId}`)
.emit("collections.add_user", {
event: event.name,
userId: groupUser.userId,
collectionId: collectionGroup.collectionId,
});
} else {
// let everyone with access to the collection know a user was removed
socketio
.to(`collection-${collectionGroup.collectionId}`)
.emit("collections.remove_user", {
event: event.name,
userId: groupUser.userId,
collectionId: collectionGroup.collectionId,
});
// tell any user clients to disconnect from the websocket channel for the collection
socketio.to(`user-${groupUser.userId}`).emit("leave", {
event: event.name,
collectionId: collectionGroup.collectionId,
});
}
}
} }
return; });
} },
});
default: websocketsQueue.process(async function websocketEventsProcessor(job) {
} const event = job.data;
} websockets.on(event, io).catch((error) => {
if (env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
});
});
} }
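
The client side of this handshake follows the socketio-auth convention: emit "authentication" with a JWT after connecting, wait for "authenticated", then manage room membership through the join/leave events the server exposes. A sketch (the event names are that library's defaults; token and documentId are assumed to be in scope):

// @flow
import io from "socket.io-client";

const socket = io(window.location.origin, { path: "/realtime" });

socket.on("connect", () => {
  socket.emit("authentication", { token }); // JWT, assumed in scope
});

socket.on("authenticated", () => {
  // subscribe to a document channel for presence updates
  socket.emit("join", { documentId, isEditing: false }); // documentId assumed
});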

86
server/services/worker.js Normal file
View File

@@ -0,0 +1,86 @@
// @flow
import http from "http";
import debug from "debug";
import Koa from "koa";
import {
globalEventQueue,
processorEventQueue,
websocketsQueue,
emailsQueue,
} from "../queues";
import Backlinks from "../queues/processors/backlinks";
import Debouncer from "../queues/processors/debouncer";
import Emails from "../queues/processors/emails";
import Imports from "../queues/processors/imports";
import Notifications from "../queues/processors/notifications";
import Revisions from "../queues/processors/revisions";
import Slack from "../queues/processors/slack";
import Sentry from "../sentry";
const log = debug("queue");
const EmailsProcessor = new Emails();
const eventProcessors = {
backlinks: new Backlinks(),
debouncer: new Debouncer(),
imports: new Imports(),
notifications: new Notifications(),
revisions: new Revisions(),
slack: new Slack(),
};
export default function init(app: Koa, server?: http.Server) {
// this queue processes global events and hands them off to processors
globalEventQueue.process(function (job) {
Object.keys(eventProcessors).forEach((name) => {
processorEventQueue.add(
{ ...job.data, service: name },
{ removeOnComplete: true }
);
});
websocketsQueue.add(job.data, { removeOnComplete: true });
});
processorEventQueue.process(function (job) {
const event = job.data;
const processor = eventProcessors[event.service];
if (!processor) {
console.warn(
`Received event for processor that isn't registered (${event.service})`
);
return;
}
if (processor.on) {
log(`${event.service} processing ${event.name}`);
processor.on(event).catch((error) => {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
});
}
});
emailsQueue.process(function (job) {
const event = job.data;
EmailsProcessor.on(event).catch((error) => {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
});
});
}
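
Adding a processor is then a matter of extending the eventProcessors map; every global event fans out to each key automatically. A sketch using the hypothetical AuditLogProcessor from earlier:

// @flow
import AuditLog from "../queues/processors/auditlog"; // hypothetical module

const eventProcessors = {
  backlinks: new Backlinks(),
  debouncer: new Debouncer(),
  imports: new Imports(),
  notifications: new Notifications(),
  revisions: new Revisions(),
  slack: new Slack(),
  auditlog: new AuditLog(), // the new processor receives every global event
};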

View File

@@ -1,5 +1,5 @@
// @flow // @flow
require("dotenv").config({ silent: true }); import "../env";
// test environment variables // test environment variables
process.env.DATABASE_URL = process.env.DATABASE_URL_TEST; process.env.DATABASE_URL = process.env.DATABASE_URL_TEST;
@@ -10,4 +10,4 @@ process.env.DEPLOYMENT = "";
process.env.ALLOWED_DOMAINS = "allowed-domain.com"; process.env.ALLOWED_DOMAINS = "allowed-domain.com";
// This is needed for the relative manual mock to be picked up // This is needed for the relative manual mock to be picked up
jest.mock("../events"); jest.mock("../queues");

13
server/tracing.js Normal file
View File

@@ -0,0 +1,13 @@
// @flow
export function initTracing() {
// If the DataDog agent is installed and the DD_API_KEY environment variable is
// in the environment then we can safely attempt to start the DD tracer
if (process.env.DD_API_KEY) {
require("dd-trace").init({
// SOURCE_COMMIT is used by Docker Hub
// SOURCE_VERSION is used by Heroku
version: process.env.SOURCE_COMMIT || process.env.SOURCE_VERSION,
});
}
}

View File

@@ -10,3 +10,189 @@ export type ContextWithState = {|
authType: "app" | "api", authType: "app" | "api",
}, },
|}; |};
export type UserEvent =
| {
name: | "users.create" // eslint-disable-line
| "users.signin"
| "users.update"
| "users.suspend"
| "users.activate"
| "users.delete",
userId: string,
teamId: string,
actorId: string,
ip: string,
}
| {
name: "users.invite",
teamId: string,
actorId: string,
data: {
email: string,
name: string,
},
ip: string,
};
export type DocumentEvent =
| {
name: | "documents.create" // eslint-disable-line
| "documents.publish"
| "documents.delete"
| "documents.permanent_delete"
| "documents.pin"
| "documents.unpin"
| "documents.archive"
| "documents.unarchive"
| "documents.restore"
| "documents.star"
| "documents.unstar",
documentId: string,
collectionId: string,
teamId: string,
actorId: string,
ip: string,
data: {
title: string,
source?: "import",
},
}
| {
name: "documents.move",
documentId: string,
collectionId: string,
teamId: string,
actorId: string,
data: {
collectionIds: string[],
documentIds: string[],
},
ip: string,
}
| {
name: | "documents.update" // eslint-disable-line
| "documents.update.delayed"
| "documents.update.debounced",
documentId: string,
collectionId: string,
createdAt: string,
teamId: string,
actorId: string,
data: {
title: string,
autosave: boolean,
done: boolean,
},
ip: string,
}
| {
name: "documents.title_change",
documentId: string,
collectionId: string,
createdAt: string,
teamId: string,
actorId: string,
data: {
title: string,
previousTitle: string,
},
ip: string,
};
export type RevisionEvent = {
name: "revisions.create",
documentId: string,
collectionId: string,
teamId: string,
};
export type CollectionImportEvent = {
name: "collections.import",
modelId: string,
teamId: string,
actorId: string,
data: { type: "outline" },
ip: string,
};
export type CollectionEvent =
| {
name: | "collections.create" // eslint-disable-line
| "collections.update"
| "collections.delete",
collectionId: string,
teamId: string,
actorId: string,
data: { name: string },
ip: string,
}
| {
name: "collections.add_user" | "collections.remove_user",
userId: string,
collectionId: string,
teamId: string,
actorId: string,
ip: string,
}
| {
name: "collections.add_group" | "collections.remove_group",
collectionId: string,
teamId: string,
actorId: string,
data: { name: string, groupId: string },
ip: string,
}
| {
name: "collections.move",
collectionId: string,
teamId: string,
actorId: string,
data: { index: string },
ip: string,
};
export type GroupEvent =
| {
name: "groups.create" | "groups.delete" | "groups.update",
actorId: string,
modelId: string,
teamId: string,
data: { name: string },
ip: string,
}
| {
name: "groups.add_user" | "groups.remove_user",
actorId: string,
userId: string,
modelId: string,
teamId: string,
data: { name: string },
ip: string,
};
export type IntegrationEvent = {
name: "integrations.create" | "integrations.update",
modelId: string,
teamId: string,
actorId: string,
ip: string,
};
export type TeamEvent = {
name: "teams.update",
teamId: string,
actorId: string,
data: Object,
ip: string,
};
export type Event =
| UserEvent
| DocumentEvent
| CollectionEvent
| CollectionImportEvent
| IntegrationEvent
| GroupEvent
| RevisionEvent
| TeamEvent;
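
Because Event is a disjoint union discriminated on name, Flow narrows the payload inside each switch case, which is what lets the processors destructure event.data safely. A small illustration:

// @flow
import type { Event } from "./types";

function describe(event: Event): string {
  switch (event.name) {
    case "documents.move":
      // narrowed: data.documentIds and data.collectionIds exist on this variant
      return `moved ${event.data.documentIds.length} documents`;
    case "users.invite":
      // narrowed: data.email exists only on the invite variant
      return `invited ${event.data.email}`;
    default:
      return event.name;
  }
}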

View File

@@ -1,4 +1,5 @@
// @flow // @flow
import chalk from "chalk";
import { Team, AuthenticationProvider } from "../models"; import { Team, AuthenticationProvider } from "../models";
export async function checkMigrations() { export async function checkMigrations() {
@@ -19,3 +20,103 @@ $ node ./build/server/scripts/20210226232041-migrate-authentication.js
process.exit(1); process.exit(1);
} }
} }
export function checkEnv() {
const errors = [];
if (
!process.env.SECRET_KEY ||
process.env.SECRET_KEY === "generate_a_new_key"
) {
errors.push(
`The ${chalk.bold(
"SECRET_KEY"
)} env variable must be set with the output of ${chalk.bold(
"$ openssl rand -hex 32"
)}`
);
}
if (
!process.env.UTILS_SECRET ||
process.env.UTILS_SECRET === "generate_a_new_key"
) {
errors.push(
`The ${chalk.bold(
"UTILS_SECRET"
)} env variable must be set with a secret value; it is recommended to use the output of ${chalk.bold(
"$ openssl rand -hex 32"
)}`
);
}
if (process.env.AWS_ACCESS_KEY_ID) {
[
"AWS_REGION",
"AWS_SECRET_ACCESS_KEY",
"AWS_S3_UPLOAD_BUCKET_URL",
"AWS_S3_UPLOAD_MAX_SIZE",
].forEach((key) => {
if (!process.env[key]) {
errors.push(
`The ${chalk.bold(
key
)} env variable must be set when using S3 compatible storage`
);
}
});
}
if (!process.env.URL) {
errors.push(
`The ${chalk.bold(
"URL"
)} env variable must be set to the fully qualified, externally accessible URL, e.g. https://wiki.mycompany.com`
);
}
if (!process.env.DATABASE_URL && !process.env.DATABASE_CONNECTION_POOL_URL) {
errors.push(
`The ${chalk.bold(
"DATABASE_URL"
)} env variable must be set to the location of your postgres server, including username, password, and port`
);
}
if (!process.env.REDIS_URL) {
errors.push(
`The ${chalk.bold(
"REDIS_URL"
)} env variable must be set to the location of your redis server, including username, password, and port`
);
}
if (errors.length) {
console.log(
chalk.bold.red(
"\n\nThe server could not start, please fix the following configuration errors and try again:\n"
)
);
errors.map((text) => console.log(` - ${text}`));
console.log("\n");
process.exit(1);
}
if (process.env.NODE_ENV === "production") {
console.log(
chalk.green(
`
Is your team enjoying Outline? Consider supporting future development by sponsoring the project:\n\nhttps://github.com/sponsors/outline
`
)
);
} else if (process.env.NODE_ENV === "development") {
console.log(
chalk.yellow(
`\nRunning Outline in development mode. To run Outline in production mode set the ${chalk.bold(
"NODE_ENV"
)} env variable to "production"\n`
)
);
}
}
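
Put together, the smallest .env that passes these checks looks something like the following; the values are placeholders, and the two keys should come from openssl rand -hex 32 as the error messages instruct:

SECRET_KEY=<output of openssl rand -hex 32>
UTILS_SECRET=<output of openssl rand -hex 32>
URL=https://wiki.mycompany.com
DATABASE_URL=postgres://user:pass@localhost:5432/outline
REDIS_URL=redis://localhost:6379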

View File

@@ -10,7 +10,7 @@ import { client } from "../redis";
const UPDATES_URL = "https://updates.getoutline.com"; const UPDATES_URL = "https://updates.getoutline.com";
const UPDATES_KEY = "UPDATES_KEY"; const UPDATES_KEY = "UPDATES_KEY";
export default async () => { export async function checkUpdates() {
invariant( invariant(
process.env.SECRET_KEY && process.env.URL, process.env.SECRET_KEY && process.env.URL,
"SECRET_KEY or URL env var is not set" "SECRET_KEY or URL env var is not set"
@@ -68,4 +68,4 @@ export default async () => {
} catch (_e) { } catch (_e) {
// no-op // no-op
} }
}; }