Handle promise linting (#5488)

Author: Tom Moor
Date: 2023-06-28 20:18:18 -04:00
Committed by: GitHub
Parent: f3d8129a13
Commit: 89d5527d39
101 changed files with 395 additions and 343 deletions
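
Every change below handles a promise that the linter flagged as unhandled (most likely typescript-eslint's no-floating-promises rule; the commit message does not name it). A minimal sketch of the three recurring patterns, using hypothetical helper names:

// Pattern 1: the result matters for correctness, so await it.
async function restoreSubscription(subscription: { restore(): Promise<void> }) {
  await subscription.restore();
}

// Pattern 2: intentional fire-and-forget; the `void` operator marks the
// promise as deliberately unawaited, which satisfies the lint rule.
function scheduleUpdateCheck(checkUpdates: () => Promise<void>) {
  void checkUpdates();
}

// Pattern 3: fire-and-forget, but failures are still reported instead of
// being silently swallowed.
function recordSearchQuery(
  create: () => Promise<void>,
  logError: (message: string, error: Error) => void
) {
  void create().catch((error) => logError("Failed to create search query", error));
}

Queue startup in the worker and websockets services additionally escalates through the new Logger.fatal helper, which logs the error and then triggers ShutdownHelper.execute().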

View File

@@ -43,7 +43,7 @@ export default async function subscriptionCreator({
   // If the subscription was deleted, then just restore the existing row.
   if (subscription.deletedAt && resubscribe) {
-    subscription.restore({ transaction });
+    await subscription.restore({ transaction });
     await Event.create(
       {

View File

@@ -41,7 +41,7 @@ export class Mailer {
"SMTP_USERNAME not provided, generating test account…"
);
this.getTestTransportOptions().then((options) => {
void this.getTestTransportOptions().then((options) => {
if (!options) {
Logger.info(
"email",

View File

@@ -53,7 +53,7 @@ async function master() {
   await checkPendingMigrations();

   if (env.TELEMETRY && env.ENVIRONMENT === "production") {
-    checkUpdates();
+    void checkUpdates();
     setInterval(checkUpdates, 24 * 3600 * 1000);
   }
 }
@@ -166,7 +166,7 @@ async function start(id: number, disconnect: () => void) {
   process.once("SIGINT", () => ShutdownHelper.execute());
 }

-throng({
+void throng({
   master,
   worker: start,
   count: processCount,

View File

@@ -6,6 +6,7 @@ import winston from "winston";
 import env from "@server/env";
 import Metrics from "@server/logging/Metrics";
 import Sentry from "@server/logging/sentry";
+import ShutdownHelper from "@server/utils/ShutdownHelper";
 import * as Tracing from "./tracer";

 const isProduction = env.ENVIRONMENT === "production";
@@ -162,6 +163,18 @@ class Logger {
     }
   }

+  /**
+   * Report a fatal error and shut down the server
+   *
+   * @param message A description of the error
+   * @param error The error that occurred
+   * @param extra Arbitrary data to be logged that will appear in prod logs
+   */
+  public fatal(message: string, error: Error, extra?: Extra) {
+    this.error(message, error, extra);
+    void ShutdownHelper.execute();
+  }
+
   /**
    * Sanitize data attached to logs and errors to remove sensitive information.
    *

View File

@@ -64,7 +64,7 @@ class View extends IdModel {
     if (!created) {
       model.count += 1;
-      model.save(options);
+      await model.save(options);
     }

     return model;

View File

@@ -174,7 +174,7 @@ This is a new paragraph.
 | | | |`,
     });
-    const text = await DocumentHelper.toPlainText(revision);
+    const text = DocumentHelper.toPlainText(revision);

     // Strip all formatting
     expect(text).toEqual(`This is a test paragraph

View File

@@ -13,7 +13,7 @@ export default class DebounceProcessor extends BaseProcessor {
   async perform(event: Event) {
     switch (event.name) {
       case "documents.update": {
-        globalEventQueue.add(
+        await globalEventQueue.add(
           { ...event, name: "documents.update.delayed" },
           {
             // speed up revision creation in development, we don't have all the
@@ -41,7 +41,10 @@ export default class DebounceProcessor extends BaseProcessor {
           return;
         }

-        globalEventQueue.add({ ...event, name: "documents.update.debounced" });
+        await globalEventQueue.add({
+          ...event,
+          name: "documents.update.debounced",
+        });
         break;
       }

View File

@@ -44,7 +44,7 @@ export default abstract class ExportDocumentTreeTask extends ExportTask {
     let text =
       format === FileOperationFormat.HTMLZip
         ? await DocumentHelper.toHTML(document, { centered: true })
-        : await DocumentHelper.toMarkdown(document);
+        : DocumentHelper.toMarkdown(document);

     const attachmentIds = includeAttachments
       ? parseAttachmentIds(document.text)

View File

@@ -135,7 +135,7 @@ export default class ImportJSONTask extends ImportTask {
       });

       if (Object.values(item.documents).length) {
-        await mapDocuments(item.documents, collectionId);
+        mapDocuments(item.documents, collectionId);
       }

       if (Object.values(item.attachments).length) {

View File

@@ -338,7 +338,7 @@ describe("revisions.create", () => {
       enabled: true,
     });
-    subscription.destroy();
+    await subscription.destroy();

     const task = new RevisionCreatedNotificationsTask();

View File

@@ -21,6 +21,7 @@ import {
   ValidationError,
   IncorrectEditionError,
 } from "@server/errors";
+import Logger from "@server/logging/Logger";
 import auth from "@server/middlewares/authentication";
 import { rateLimiter } from "@server/middlewares/rateLimiter";
 import validate from "@server/middlewares/validate";
@@ -815,13 +816,15 @@ router.post(
     // When requesting subsequent pages of search results we don't want to record
     // duplicate search query records
     if (offset === 0) {
-      SearchQuery.create({
+      void SearchQuery.create({
         userId: user?.id,
         teamId,
         shareId,
         source: ctx.state.auth.type || "app", // we'll consider anything that isn't "api" to be "app"
         query,
         results: totalCount,
+      }).catch((err) => {
+        Logger.error("Failed to create search query", err);
       });
     }

View File

@@ -55,5 +55,5 @@ export default async function main(exit = false) {
 } // In the test suite we import the script rather than run via node CLI

 if (process.env.NODE_ENV !== "test") {
-  main(true);
+  void main(true);
 }

View File

@@ -49,5 +49,5 @@ export default async function main(exit = false) {
 }

 if (process.env.NODE_ENV !== "test") {
-  main(true);
+  void main(true);
 }

View File

@@ -81,5 +81,5 @@ export default async function main(exit = false) {
 }

 if (process.env.NODE_ENV !== "test") {
-  main(true);
+  void main(true);
 }

View File

@@ -67,5 +67,5 @@ export default async function main(exit = false) {
 }

 if (process.env.NODE_ENV !== "test") {
-  main(true);
+  void main(true);
 }

View File

@@ -58,5 +58,5 @@ export default async function main(exit = false) {
 } // In the test suite we import the script rather than run via node CLI

 if (process.env.NODE_ENV !== "test") {
-  main(true);
+  void main(true);
 }

View File

@@ -48,5 +48,5 @@ export default async function main(exit = false) {
 }

 if (process.env.NODE_ENV !== "test") {
-  main(true);
+  void main(true);
 }

View File

@@ -18,7 +18,7 @@ export default function init() {
   // Just give everything time to startup before running the first time. Not
   // _technically_ required to function.
   setTimeout(() => {
-    run(TaskSchedule.Daily);
-    run(TaskSchedule.Hourly);
+    void run(TaskSchedule.Daily);
+    void run(TaskSchedule.Hourly);
   }, 30 * Second);
 }

View File

@@ -57,8 +57,8 @@ if (env.CDN_URL) {
   defaultSrc.push(env.CDN_URL);
 }

-export default function init(app: Koa = new Koa(), server?: Server): Koa {
-  initI18n();
+export default function init(app: Koa = new Koa(), server?: Server) {
+  void initI18n();

   if (isProduction) {
     // Force redirect to HTTPS protocol unless explicitly disabled

View File

@@ -130,23 +130,27 @@ export default function init(
   // Handle events from event queue that should be sent to the clients down ws
   const websockets = new WebsocketsProcessor();

-  websocketQueue.process(
-    traceFunction({
-      serviceName: "websockets",
-      spanName: "process",
-      isRoot: true,
-    })(async function (job) {
-      const event = job.data;
-
-      Tracing.setResource(`Processor.WebsocketsProcessor`);
-
-      websockets.perform(event, io).catch((error) => {
-        Logger.error("Error processing websocket event", error, {
-          event,
-        });
-      });
-    })
-  );
+  websocketQueue
+    .process(
+      traceFunction({
+        serviceName: "websockets",
+        spanName: "process",
+        isRoot: true,
+      })(async function (job) {
+        const event = job.data;
+
+        Tracing.setResource(`Processor.WebsocketsProcessor`);
+
+        websockets.perform(event, io).catch((error) => {
+          Logger.error("Error processing websocket event", error, {
+            event,
+          });
+        });
+      })
+    )
+    .catch((err) => {
+      Logger.fatal("Error starting websocketQueue", err);
+    });
 }

 async function authenticated(io: IO.Server, socket: SocketWithAuth) {
@@ -168,9 +172,6 @@ async function authenticated(io: IO.Server, socket: SocketWithAuth) {
     rooms.push(`collection-${collectionId}`)
   );

-  // join all of the rooms at once
-  socket.join(rooms);
-
   // allow the client to request to join rooms
   socket.on("join", async (event) => {
     // user is joining a collection channel, because their permissions have
@@ -194,6 +195,9 @@ async function authenticated(io: IO.Server, socket: SocketWithAuth) {
       Metrics.increment("websockets.collections.leave");
     }
   });
+
+  // join all of the rooms at once
+  await socket.join(rooms);
 }

 /**

View File

@@ -12,128 +12,144 @@ import processors from "../queues/processors";
 import tasks from "../queues/tasks";

 export default function init() {
-  initI18n();
+  void initI18n();

   // This queue processes the global event bus
-  globalEventQueue.process(
-    traceFunction({
-      serviceName: "worker",
-      spanName: "process",
-      isRoot: true,
-    })(async function (job) {
-      const event = job.data;
-      let err;
-
-      setResource(`Event.${event.name}`);
-
-      Logger.info("worker", `Processing ${event.name}`, {
-        event,
-        attempt: job.attemptsMade,
-      });
-
-      // For each registered processor we check to see if it wants to handle the
-      // event (applicableEvents), and if so add a new queued job specifically
-      // for that processor.
-      for (const name in processors) {
-        const ProcessorClass = processors[name];
-        if (!ProcessorClass) {
-          throw new Error(
-            `Received event "${event.name}" for processor (${name}) that isn't registered. Check the file name matches the class name.`
-          );
-        }
-
-        try {
-          if (name === "WebsocketsProcessor") {
-            // websockets are a special case on their own queue because they must
-            // only be consumed by the websockets service rather than workers.
-            await websocketQueue.add(job.data);
-          } else if (
-            ProcessorClass.applicableEvents.includes(event.name) ||
-            ProcessorClass.applicableEvents.includes("*")
-          ) {
-            await processorEventQueue.add({ event, name });
-          }
-        } catch (error) {
-          Logger.error(
-            `Error adding ${event.name} to ${name} queue`,
-            error,
-            event
-          );
-          err = error;
-        }
-      }
-
-      if (err) {
-        throw err;
-      }
-    })
-  );
+  globalEventQueue
+    .process(
+      traceFunction({
+        serviceName: "worker",
+        spanName: "process",
+        isRoot: true,
+      })(async function (job) {
+        const event = job.data;
+        let err;
+
+        setResource(`Event.${event.name}`);
+
+        Logger.info("worker", `Processing ${event.name}`, {
+          event,
+          attempt: job.attemptsMade,
+        });
+
+        // For each registered processor we check to see if it wants to handle the
+        // event (applicableEvents), and if so add a new queued job specifically
+        // for that processor.
+        for (const name in processors) {
+          const ProcessorClass = processors[name];
+          if (!ProcessorClass) {
+            throw new Error(
+              `Received event "${event.name}" for processor (${name}) that isn't registered. Check the file name matches the class name.`
+            );
+          }
+
+          try {
+            if (name === "WebsocketsProcessor") {
+              // websockets are a special case on their own queue because they must
+              // only be consumed by the websockets service rather than workers.
+              await websocketQueue.add(job.data);
+            } else if (
+              ProcessorClass.applicableEvents.includes(event.name) ||
+              ProcessorClass.applicableEvents.includes("*")
+            ) {
+              await processorEventQueue.add({ event, name });
+            }
+          } catch (error) {
+            Logger.error(
+              `Error adding ${event.name} to ${name} queue`,
+              error,
+              event
+            );
+            err = error;
+          }
+        }
+
+        if (err) {
+          throw err;
+        }
+      })
+    )
+    .catch((err) => {
+      Logger.fatal("Error starting globalEventQueue", err);
+    });

   // Jobs for individual processors are processed here. Only applicable events
   // as unapplicable events were filtered in the global event queue above.
-  processorEventQueue.process(
-    traceFunction({
-      serviceName: "worker",
-      spanName: "process",
-      isRoot: true,
-    })(async function (job) {
-      const { event, name } = job.data;
-      const ProcessorClass = processors[name];
-
-      setResource(`Processor.${name}`);
-
-      if (!ProcessorClass) {
-        throw new Error(
-          `Received event "${event.name}" for processor (${name}) that isn't registered. Check the file name matches the class name.`
-        );
-      }
-
-      const processor = new ProcessorClass();
-      if (processor.perform) {
-        Logger.info("worker", `${name} running ${event.name}`, {
-          event,
-        });
-
-        try {
-          await processor.perform(event);
-        } catch (err) {
-          Logger.error(`Error processing ${event.name} in ${name}`, err, event);
-          throw err;
-        }
-      }
-    })
-  );
+  processorEventQueue
+    .process(
+      traceFunction({
+        serviceName: "worker",
+        spanName: "process",
+        isRoot: true,
+      })(async function (job) {
+        const { event, name } = job.data;
+        const ProcessorClass = processors[name];
+
+        setResource(`Processor.${name}`);
+
+        if (!ProcessorClass) {
+          throw new Error(
+            `Received event "${event.name}" for processor (${name}) that isn't registered. Check the file name matches the class name.`
+          );
+        }
+
+        const processor = new ProcessorClass();
+        if (processor.perform) {
+          Logger.info("worker", `${name} running ${event.name}`, {
+            event,
+          });
+
+          try {
+            await processor.perform(event);
+          } catch (err) {
+            Logger.error(
+              `Error processing ${event.name} in ${name}`,
+              err,
+              event
+            );
+            throw err;
+          }
+        }
+      })
+    )
+    .catch((err) => {
+      Logger.fatal("Error starting processorEventQueue", err);
+    });

   // Jobs for async tasks are processed here.
-  taskQueue.process(
-    traceFunction({
-      serviceName: "worker",
-      spanName: "process",
-      isRoot: true,
-    })(async function (job) {
-      const { name, props } = job.data;
-      const TaskClass = tasks[name];
-
-      setResource(`Task.${name}`);
-
-      if (!TaskClass) {
-        throw new Error(
-          `Task "${name}" is not registered. Check the file name matches the class name.`
-        );
-      }
-
-      Logger.info("worker", `${name} running`, props);
-
-      const task = new TaskClass();
-
-      try {
-        await task.perform(props);
-      } catch (err) {
-        Logger.error(`Error processing task in ${name}`, err, props);
-        throw err;
-      }
-    })
-  );
+  taskQueue
+    .process(
+      traceFunction({
+        serviceName: "worker",
+        spanName: "process",
+        isRoot: true,
+      })(async function (job) {
+        const { name, props } = job.data;
+        const TaskClass = tasks[name];
+
+        setResource(`Task.${name}`);
+
+        if (!TaskClass) {
+          throw new Error(
+            `Task "${name}" is not registered. Check the file name matches the class name.`
+          );
+        }
+
+        Logger.info("worker", `${name} running`, props);
+
+        const task = new TaskClass();
+
+        try {
+          await task.perform(props);
+        } catch (err) {
+          Logger.error(`Error processing task in ${name}`, err, props);
+          throw err;
+        }
+      })
+    )
+    .catch((err) => {
+      Logger.fatal("Error starting taskQueue", err);
+    });
 }

View File

@@ -60,7 +60,7 @@ export async function signIn(
   await user.updateSignedIn(ctx.request.ip);

   // don't await event creation for a faster sign-in
-  Event.create({
+  void Event.create({
     name: "users.signin",
     actorId: user.id,
     userId: user.id,

View File

@@ -25,9 +25,10 @@ export function opts(user?: User | null) {
  *
  * @returns i18n instance
  */
-export function initI18n() {
+export async function initI18n() {
   const lng = unicodeCLDRtoBCP47(env.DEFAULT_LANGUAGE);
-  i18n.use(backend).init({
+  i18n.use(backend);
+  await i18n.init({
     compatibilityJSON: "v3",
     backend: {
       loadPath: (language: string) =>