feat: Normalized server logging (#2567)

* feat: Normalize logging

* Remove scattered console.error + Sentry.captureException

* Remove mention of debug

* cleanup dev output

* Edge cases, docs

* Refactor: Move logger, metrics, sentry under 'logging' folder.
Trying to reduce the number of things kept under the generic 'utils' folder

* cleanup, last few console calls
This commit is contained in:
Tom Moor
2021-09-14 18:04:35 -07:00
committed by GitHub
parent 6c605cf720
commit 83a61b87ed
36 changed files with 508 additions and 264 deletions

View File

@@ -1,9 +1,9 @@
// @flow
import querystring from "querystring";
import * as Sentry from "@sentry/node";
import { addMonths } from "date-fns";
import { type Context } from "koa";
import { pick } from "lodash";
import Logger from "../logging/logger";
import { User, Event, Team, Collection, View } from "../models";
import { getCookieDomain } from "../utils/domains";
@@ -37,8 +37,8 @@ export async function signIn(
["ref", "utm_content", "utm_medium", "utm_source", "utm_campaign"]
);
await team.update({ signupQueryParams });
} catch (err) {
Sentry.captureException(err);
} catch (error) {
Logger.error(`Error persisting signup query params`, error);
}
}
}

View File

@@ -1,45 +0,0 @@
// @flow
import metrics from "datadog-metrics";
// Initialize the Datadog reporter only when an API key is configured;
// without it, every exported function below becomes a silent no-op.
if (process.env.DD_API_KEY) {
  metrics.init({
    apiKey: process.env.DD_API_KEY,
    prefix: "outline.",
    // Prefer an explicit Datadog environment tag, falling back to NODE_ENV.
    defaultTags: [`env:${process.env.DD_ENV || process.env.NODE_ENV}`],
  });
}
export function gauge(key: string, value: number, tags?: string[]): void {
if (!process.env.DD_API_KEY) {
return;
}
return metrics.gauge(key, value, tags);
}
export function gaugePerInstance(
key: string,
value: number,
tags?: string[] = []
): void {
if (!process.env.DD_API_KEY) {
return;
}
const instanceId = process.env.INSTANCE_ID || process.env.HEROKU_DYNO_ID;
if (!instanceId) {
throw new Error(
"INSTANCE_ID or HEROKU_DYNO_ID must be set when using Datadog"
);
}
return metrics.gauge(key, value, [...tags, `instance:${instanceId}`]);
}
export function increment(key: string, tags?: { [string]: string }): void {
if (!process.env.DD_API_KEY) {
return;
}
return metrics.increment(key, tags);
}

View File

@@ -2,9 +2,8 @@
import Queue from "bull";
import Redis from "ioredis";
import { snakeCase } from "lodash";
import Metrics from "../logging/metrics";
import { client, subscriber } from "../redis";
import * as metrics from "../utils/metrics";
import Sentry from "./sentry";
export function createQueue(name: string) {
const prefix = `queue.${snakeCase(name)}`;
@@ -26,29 +25,24 @@ export function createQueue(name: string) {
});
queue.on("stalled", () => {
metrics.increment(`${prefix}.jobs.stalled`);
Metrics.increment(`${prefix}.jobs.stalled`);
});
queue.on("completed", () => {
metrics.increment(`${prefix}.jobs.completed`);
Metrics.increment(`${prefix}.jobs.completed`);
});
queue.on("error", (err) => {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
} else {
console.error(err);
}
metrics.increment(`${prefix}.jobs.errored`);
Metrics.increment(`${prefix}.jobs.errored`);
});
queue.on("failed", () => {
metrics.increment(`${prefix}.jobs.failed`);
Metrics.increment(`${prefix}.jobs.failed`);
});
setInterval(async () => {
metrics.gauge(`${prefix}.count`, await queue.count());
metrics.gauge(`${prefix}.delayed_count`, await queue.getDelayedCount());
Metrics.gauge(`${prefix}.count`, await queue.count());
Metrics.gauge(`${prefix}.delayed_count`, await queue.getDelayedCount());
}, 5 * 1000);
return queue;

View File

@@ -1,10 +1,10 @@
// @flow
import crypto from "crypto";
import * as Sentry from "@sentry/node";
import AWS from "aws-sdk";
import { addHours, format } from "date-fns";
import fetch from "fetch-with-proxy";
import { v4 as uuidv4 } from "uuid";
import Logger from "../logging/logger";
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
@@ -147,15 +147,11 @@ export const uploadToS3FromUrl = async (
const endpoint = publicS3Endpoint(true);
return `${endpoint}/${key}`;
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err, {
extra: {
url,
},
});
} else {
throw err;
}
Logger.error("Error uploading to S3 from URL", err, {
url,
key,
acl,
});
}
};
@@ -198,10 +194,8 @@ export const getFileByKey = async (key: string) => {
const data = await s3.getObject(params).promise();
return data.Body;
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
} else {
throw err;
}
Logger.error("Error getting file from S3 by key", err, {
key,
});
}
};

View File

@@ -1,57 +0,0 @@
// @flow
import * as Sentry from "@sentry/node";
import env from "../env";
import type { ContextWithState } from "../types";
// Initialize Sentry error reporting only when a DSN is configured; without
// one, callers fall back to plain console logging (see requestErrorHandler).
if (env.SENTRY_DSN) {
  Sentry.init({
    dsn: env.SENTRY_DSN,
    environment: env.ENVIRONMENT,
    release: env.RELEASE,
    // disable breadcrumb collection entirely
    maxBreadcrumbs: 0,
    ignoreErrors: [
      // emitted by Koa when bots attempt to snoop on paths such as wp-admin
      // or the user client submits a bad request. These are expected in normal
      // running of the application and don't need to be reported.
      "BadRequestError",
      "UnauthorizedError",
    ],
  });
}
export function requestErrorHandler(error: any, ctx: ContextWithState) {
  // Dropped connections are routine in normal operation; log them locally
  // and never forward them to the bug tracker.
  if (error.code === "EPIPE" || error.code === "ECONNRESET") {
    console.warn("Connection error", { error });
    return;
  }

  // Without a Sentry DSN configured, fall back to plain console output.
  if (!process.env.SENTRY_DSN) {
    console.error(error);
    return;
  }

  // Capture to Sentry, enriching the scope with request/auth/user context
  // when available so errors can be correlated with a specific request.
  Sentry.withScope((scope) => {
    const requestId = ctx.headers["x-request-id"];
    if (requestId) {
      scope.setTag("request_id", requestId);
    }

    const authType = ctx.state ? ctx.state.authType : undefined;
    if (authType) {
      scope.setTag("auth_type", authType);
    }

    const userId = ctx.state && ctx.state.user ? ctx.state.user.id : undefined;
    if (userId) {
      scope.setUser({ id: userId });
    }

    scope.addEventProcessor((event) =>
      Sentry.Handlers.parseRequest(event, ctx.request)
    );
    Sentry.captureException(error);
  });
}
export default Sentry;

View File

@@ -1,5 +1,6 @@
// @flow
import chalk from "chalk";
import Logger from "../logging/logger";
import { Team, AuthenticationProvider } from "../models";
export async function checkMigrations() {
@@ -11,12 +12,14 @@ export async function checkMigrations() {
const providers = await AuthenticationProvider.count();
if (teams && !providers) {
console.error(`
Logger.warn(
`
This version of Outline cannot start until a data migration is complete.
Backup your database, run the database migrations and the following script:
$ node ./build/server/scripts/20210226232041-migrate-authentication.js
`);
`
);
process.exit(1);
}
}
@@ -92,18 +95,16 @@ export function checkEnv() {
}
if (errors.length) {
console.log(
chalk.bold.red(
"\n\nThe server could not start, please fix the following configuration errors and try again:\n"
)
Logger.warn(
"\n\nThe server could not start, please fix the following configuration errors and try again:\n" +
errors.map((e) => `- ${e}`).join("\n")
);
errors.map((text) => console.log(` - ${text}`));
console.log("\n");
process.exit(1);
}
if (process.env.NODE_ENV === "production") {
console.log(
Logger.info(
"lifecycle",
chalk.green(
`
Is your team enjoying Outline? Consider supporting future development by sponsoring the project:\n\nhttps://github.com/sponsors/outline
@@ -111,12 +112,12 @@ Is your team enjoying Outline? Consider supporting future development by sponsor
)
);
} else if (process.env.NODE_ENV === "development") {
console.log(
chalk.yellow(
`\nRunning Outline in development mode. To run Outline in production mode set the ${chalk.bold(
"NODE_ENV"
)} env variable to "production"\n`
)
Logger.warn(
`Running Outline in ${chalk.bold(
"development mode"
)}. To run Outline in production mode set the ${chalk.bold(
"NODE_ENV"
)} env variable to "production"`
);
}
}

View File

@@ -1,8 +1,8 @@
// @flow
import fs from "fs";
import * as Sentry from "@sentry/node";
import JSZip from "jszip";
import tmp from "tmp";
import Logger from "../logging/logger";
import { Attachment, Collection, Document } from "../models";
import { serializeFilename } from "./fs";
import { getFileByKey } from "./s3";
@@ -47,11 +47,9 @@ async function addImageToArchive(zip, key) {
const img = await getFileByKey(key);
zip.file(key, img, { createFolders: true });
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
}
// error during file retrieval
console.error(err);
Logger.error("Error loading image attachment from S3", err, {
key,
});
}
}