chore(deps-dev): bump prettier from 2.1.2 to 2.8.8 (#5372)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Tom Moor <tom.moor@gmail.com>
@@ -7,7 +7,7 @@ const emptyFn = function () {};
const callableHandlers = {
get<T, P extends keyof T>(_target: T, _prop: P, _receiver: any): T[P] {
const newMock = new Proxy(emptyFn, callableHandlers);
return (newMock as any) as T[P];
return newMock as any as T[P];
},

apply<T extends (...args: any) => any, A extends Parameters<T>>(
@@ -16,7 +16,7 @@ const callableHandlers = {
_args: A
): ReturnType<T> {
const newMock = new Proxy(emptyFn, callableHandlers);
return (newMock as any) as ReturnType<T>;
return newMock as any as ReturnType<T>;
},
};

@@ -88,9 +88,8 @@ export default class PersistenceExtension implements Extension {
// Find the collaborators that have modified the document since it was last
// persisted and clear the map, if there's no collaborators then we don't
// need to persist the document.
const documentCollaboratorIds = this.documentCollaboratorIds.get(
documentName
);
const documentCollaboratorIds =
this.documentCollaboratorIds.get(documentName);
if (!documentCollaboratorIds) {
Logger.debug("multiplayer", `No changes for ${documentName}`);
return;

@@ -30,8 +30,7 @@ const importMapping: ImportableFile[] = [
getMarkdown: docxToMarkdown,
},
{
type:
"application/vnd.openxmlformats-officedocument.wordprocessingml.document",
type: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
getMarkdown: docxToMarkdown,
},
{

@@ -171,12 +171,12 @@ class Logger {

if (isString(input)) {
if (sensitiveFields.some((field) => input.includes(field))) {
return ("[Filtered]" as any) as T;
return "[Filtered]" as any as T;
}
}

if (isArray(input)) {
return (input.map(this.sanitize) as any) as T;
return input.map(this.sanitize) as any as T;
}

if (isObject(input)) {

@@ -26,7 +26,7 @@ import env from "@server/env";
import tracer from "./tracer";
import * as Tracing from "./tracer";

type DDTag = typeof DDTags[keyof typeof DDTags];
type DDTag = (typeof DDTags)[keyof typeof DDTags];

type Tags = {
[tag in DDTag]?: any;
@@ -55,72 +55,74 @@ interface TraceConfig {
*
* @param config Optional configuration for the span that will be created for this trace.
*/
export const traceFunction = (config: TraceConfig) => <
F extends (...args: any[]) => any,
P extends Parameters<F>,
R extends ReturnType<F>
>(
target: F
): F =>
env.ENVIRONMENT === "test"
? target
: (function wrapperFn(this: any, ...args: P): R {
const { className, methodName = target.name, tags } = config;
const childOf = config.isRoot
? undefined
: tracer.scope().active() || undefined;
export const traceFunction =
(config: TraceConfig) =>
<
F extends (...args: any[]) => any,
P extends Parameters<F>,
R extends ReturnType<F>
>(
target: F
): F =>
env.ENVIRONMENT === "test"
? target
: (function wrapperFn(this: any, ...args: P): R {
const { className, methodName = target.name, tags } = config;
const childOf = config.isRoot
? undefined
: tracer.scope().active() || undefined;

const spanName = config.spanName || className || "DEFAULT_SPAN_NAME";
const spanName = config.spanName || className || "DEFAULT_SPAN_NAME";

const resourceName = config.resourceName
? config.resourceName
: methodName;
const spanOptions: SpanOptions = {
childOf,
tags: {
[DDTags.RESOURCE_NAME]: resourceName,
...tags,
},
};
const resourceName = config.resourceName
? config.resourceName
: methodName;
const spanOptions: SpanOptions = {
childOf,
tags: {
[DDTags.RESOURCE_NAME]: resourceName,
...tags,
},
};

const span = tracer.startSpan(spanName, spanOptions);
const span = tracer.startSpan(spanName, spanOptions);

if (!span) {
return target.call(this, ...args);
}

if (config.serviceName) {
span.setTag(
DDTags.SERVICE_NAME,
`${env.DD_SERVICE}-${config.serviceName}`
);
}

if (config.makeSearchable) {
span.setTag(DDTags.ANALYTICS, true);
}

// The callback fn needs to be wrapped in an arrow fn as the activate fn clobbers `this`
return tracer.scope().activate(span, () => {
const output = target.call(this, ...args);

if (output && typeof output.then === "function") {
output
.catch((error: Error | undefined) => {
if (error instanceof Error) {
Tracing.setError(error, span);
}
})
.finally(() => {
span.finish();
});
} else {
span.finish();
if (!span) {
return target.call(this, ...args);
}

return output;
});
} as F);
if (config.serviceName) {
span.setTag(
DDTags.SERVICE_NAME,
`${env.DD_SERVICE}-${config.serviceName}`
);
}

if (config.makeSearchable) {
span.setTag(DDTags.ANALYTICS, true);
}

// The callback fn needs to be wrapped in an arrow fn as the activate fn clobbers `this`
return tracer.scope().activate(span, () => {
const output = target.call(this, ...args);

if (output && typeof output.then === "function") {
output
.catch((error: Error | undefined) => {
if (error instanceof Error) {
Tracing.setError(error, span);
}
})
.finally(() => {
span.finish();
});
} else {
span.finish();
}

return output;
});
} as F);

const traceMethod = (config?: TraceConfig) =>
function <R, A extends any[], F extends (...args: A) => R>(

@@ -98,8 +98,9 @@ class AuthenticationProvider extends Model {
}

disable = async (options?: SaveOptions<AuthenticationProvider>) => {
const res = await (this
.constructor as typeof AuthenticationProvider).findAndCountAll({
const res = await (
this.constructor as typeof AuthenticationProvider
).findAndCountAll({
...options,
where: {
teamId: this.teamId,

@@ -527,8 +527,9 @@ class Document extends ParanoidModel {
const getChildDocumentIds = async (
...parentDocumentId: string[]
): Promise<string[]> => {
const childDocuments = await (this
.constructor as typeof Document).findAll({
const childDocuments = await (
this.constructor as typeof Document
).findAll({
attributes: ["id"],
where: {
parentDocumentId,
@@ -560,8 +561,9 @@ class Document extends ParanoidModel {

// Helper to archive all child documents for a document
const archiveChildren = async (parentDocumentId: string) => {
const childDocuments = await (this
.constructor as typeof Document).findAll({
const childDocuments = await (
this.constructor as typeof Document
).findAll({
where: {
parentDocumentId,
},

@@ -1,15 +1,13 @@
/* eslint-disable @typescript-eslint/ban-types */

const Deprecated = (message?: string) => (
target: Object,
propertyKey: string
) => {
if (process.env[propertyKey]) {
// eslint-disable-next-line no-console
console.warn(
`The environment variable ${propertyKey} is deprecated and will be removed in a future release. ${message}`
);
}
};
const Deprecated =
(message?: string) => (target: Object, propertyKey: string) => {
if (process.env[propertyKey]) {
// eslint-disable-next-line no-console
console.warn(
`The environment variable ${propertyKey} is deprecated and will be removed in a future release. ${message}`
);
}
};

export default Deprecated;

@@ -4,9 +4,7 @@ import NotificationHelper from "@server/models/helpers/NotificationHelper";
import { CollectionEvent } from "@server/types";
import BaseTask, { TaskPriority } from "./BaseTask";

export default class CollectionCreatedNotificationsTask extends BaseTask<
CollectionEvent
> {
export default class CollectionCreatedNotificationsTask extends BaseTask<CollectionEvent> {
public async perform(event: CollectionEvent) {
const collection = await Collection.findByPk(event.collectionId);

@@ -15,10 +13,11 @@ export default class CollectionCreatedNotificationsTask extends BaseTask<
return;
}

const recipients = await NotificationHelper.getCollectionNotificationRecipients(
collection,
NotificationEventType.CreateCollection
);
const recipients =
await NotificationHelper.getCollectionNotificationRecipients(
collection,
NotificationEventType.CreateCollection
);

for (const recipient of recipients) {
// Suppress notifications for suspended users

@@ -7,9 +7,7 @@ import ProsemirrorHelper from "@server/models/helpers/ProsemirrorHelper";
import { CommentEvent } from "@server/types";
import BaseTask, { TaskPriority } from "./BaseTask";

export default class CommentCreatedNotificationsTask extends BaseTask<
CommentEvent
> {
export default class CommentCreatedNotificationsTask extends BaseTask<CommentEvent> {
public async perform(event: CommentEvent) {
const [document, comment] = await Promise.all([
Document.scope("withCollection").findOne({

@@ -4,9 +4,7 @@ import ProsemirrorHelper from "@server/models/helpers/ProsemirrorHelper";
import { CommentEvent, CommentUpdateEvent } from "@server/types";
import BaseTask, { TaskPriority } from "./BaseTask";

export default class CommentUpdatedNotificationsTask extends BaseTask<
CommentEvent
> {
export default class CommentUpdatedNotificationsTask extends BaseTask<CommentEvent> {
public async perform(event: CommentUpdateEvent) {
const [document, comment] = await Promise.all([
Document.scope("withCollection").findOne({

@@ -6,9 +6,7 @@ import NotificationHelper from "@server/models/helpers/NotificationHelper";
import { DocumentEvent } from "@server/types";
import BaseTask, { TaskPriority } from "./BaseTask";

export default class DocumentPublishedNotificationsTask extends BaseTask<
DocumentEvent
> {
export default class DocumentPublishedNotificationsTask extends BaseTask<DocumentEvent> {
public async perform(event: DocumentEvent) {
const document = await Document.findByPk(event.documentId, {
includeState: true,

@@ -287,7 +287,8 @@ export default class ImportNotionTask extends ImportTask {
/**
* Regex to find markdown images of all types
*/
private ImageRegex = /!\[(?<alt>[^\][]*?)]\((?<filename>[^\][]*?)(?=“|\))“?(?<title>[^\][”]+)?”?\)/g;
private ImageRegex =
/!\[(?<alt>[^\][]*?)]\((?<filename>[^\][]*?)(?=“|\))“?(?<title>[^\][”]+)?”?\)/g;

/**
* Regex to find markdown links containing ID's that look like UUID's with the
@@ -298,5 +299,6 @@ export default class ImportNotionTask extends ImportTask {
/**
* Regex to find Notion document UUID's in the title of a document.
*/
private NotionUUIDRegex = /\s([0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}|[0-9a-fA-F]{32})$/;
private NotionUUIDRegex =
/\s([0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}|[0-9a-fA-F]{32})$/;
}

@@ -11,9 +11,7 @@ import NotificationHelper from "@server/models/helpers/NotificationHelper";
import { RevisionEvent } from "@server/types";
import BaseTask, { TaskPriority } from "./BaseTask";

export default class RevisionCreatedNotificationsTask extends BaseTask<
RevisionEvent
> {
export default class RevisionCreatedNotificationsTask extends BaseTask<RevisionEvent> {
public async perform(event: RevisionEvent) {
const [document, revision] = await Promise.all([
Document.findByPk(event.documentId, { includeState: true }),

@@ -133,14 +133,12 @@ router.post("auth.info", auth(), async (ctx: APIContext) => {
includeDetails: true,
}),
team: presentTeam(team),
availableTeams: uniqBy(
[...signedInTeams, ...availableTeams],
"id"
).map((team) =>
presentAvailableTeam(
team,
signedInTeamIds.includes(team.id) || team.id === user.teamId
)
availableTeams: uniqBy([...signedInTeams, ...availableTeams], "id").map(
(team) =>
presentAvailableTeam(
team,
signedInTeamIds.includes(team.id) || team.id === user.teamId
)
),
},
policies: presentPolicies(user, [team]),

@@ -168,10 +168,8 @@ router.post(
rateLimiter(RateLimiterStrategy.TenPerHour),
auth(),
async (ctx: APIContext) => {
const {
attachmentId,
format = FileOperationFormat.MarkdownZip,
} = ctx.request.body;
const { attachmentId, format = FileOperationFormat.MarkdownZip } =
ctx.request.body;
assertUuid(attachmentId, "attachmentId is required");

const { user } = ctx.state.auth;

@@ -630,16 +628,8 @@ router.post(
);

router.post("collections.update", auth(), async (ctx: APIContext) => {
const {
id,
name,
description,
icon,
permission,
color,
sort,
sharing,
} = ctx.request.body;
const { id, name, description, icon, permission, color, sort, sharing } =
ctx.request.body;

if (color) {
assertHexColor(color, "Invalid hex value (please use format #FFFFFF)");

@@ -995,21 +995,18 @@ router.post(
}
}

const {
documents,
collections,
collectionChanged,
} = await sequelize.transaction(async (transaction) =>
documentMover({
user,
document,
collectionId,
parentDocumentId,
index,
ip: ctx.request.ip,
transaction,
})
);
const { documents, collections, collectionChanged } =
await sequelize.transaction(async (transaction) =>
documentMover({
user,
document,
collectionId,
parentDocumentId,
index,
ip: ctx.request.ip,
transaction,
})
);

ctx.body = {
data: {

@@ -39,50 +39,49 @@ export default function init(
],
});

server.on("upgrade", function (
req: IncomingMessage,
socket: Duplex,
head: Buffer
) {
if (req.url?.startsWith(path)) {
// parse document id and close connection if not present in request
const documentId = url
.parse(req.url)
.pathname?.replace(path, "")
.split("/")
.pop();
server.on(
"upgrade",
function (req: IncomingMessage, socket: Duplex, head: Buffer) {
if (req.url?.startsWith(path)) {
// parse document id and close connection if not present in request
const documentId = url
.parse(req.url)
.pathname?.replace(path, "")
.split("/")
.pop();

if (documentId) {
wss.handleUpgrade(req, socket, head, (client) => {
// Handle websocket connection errors as soon as the client is upgraded
client.on("error", (error) => {
Logger.error(
`Websocket error`,
error,
{
documentId,
},
req
);
if (documentId) {
wss.handleUpgrade(req, socket, head, (client) => {
// Handle websocket connection errors as soon as the client is upgraded
client.on("error", (error) => {
Logger.error(
`Websocket error`,
error,
{
documentId,
},
req
);
});

hocuspocus.handleConnection(client, req, documentId);
});
return;
}
}

hocuspocus.handleConnection(client, req, documentId);
});
if (
req.url?.startsWith("/realtime") &&
serviceNames.includes("websockets")
) {
// Nothing to do, the websockets service will handle this request
return;
}
}

if (
req.url?.startsWith("/realtime") &&
serviceNames.includes("websockets")
) {
// Nothing to do, the websockets service will handle this request
return;
// If the collaboration service is running it will close the connection
socket.end(`HTTP/1.1 400 Bad Request\r\n`);
}

// If the collaboration service is running it will close the connection
socket.end(`HTTP/1.1 400 Bad Request\r\n`);
});
);

ShutdownHelper.add("collaboration", ShutdownOrder.normal, () =>
hocuspocus.destroy()

@@ -53,25 +53,24 @@ export default function init(
);
}

server.on("upgrade", function (
req: IncomingMessage,
socket: Duplex,
head: Buffer
) {
if (req.url?.startsWith(path)) {
invariant(ioHandleUpgrade, "Existing upgrade handler must exist");
ioHandleUpgrade(req, socket, head);
return;
}
server.on(
"upgrade",
function (req: IncomingMessage, socket: Duplex, head: Buffer) {
if (req.url?.startsWith(path)) {
invariant(ioHandleUpgrade, "Existing upgrade handler must exist");
ioHandleUpgrade(req, socket, head);
return;
}

if (serviceNames.includes("collaboration")) {
// Nothing to do, the collaboration service will handle this request
return;
}
if (serviceNames.includes("collaboration")) {
// Nothing to do, the collaboration service will handle this request
return;
}

// If the collaboration service isn't running then we need to close the connection
socket.end(`HTTP/1.1 400 Bad Request\r\n`);
});
// If the collaboration service isn't running then we need to close the connection
socket.end(`HTTP/1.1 400 Bad Request\r\n`);
}
);

ShutdownHelper.add("websockets", ShutdownOrder.normal, async () => {
Metrics.gaugePerInstance("websockets.count", 0);

@@ -55,7 +55,8 @@ it("should parse attachment ID from markdown with title", () => {
it("should parse multiple attachment IDs from markdown", () => {
const uuid = uuidv4();
const uuid2 = uuidv4();
const results = parseAttachmentIds(`
const results =
parseAttachmentIds(`

some text

@@ -1,7 +1,9 @@
import { uniq, compact } from "lodash";

const attachmentRedirectRegex = /\/api\/attachments\.redirect\?id=(?<id>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/gi;
const attachmentPublicRegex = /public\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})\/(?<id>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/gi;
const attachmentRedirectRegex =
/\/api\/attachments\.redirect\?id=(?<id>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/gi;
const attachmentPublicRegex =
/public\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})\/(?<id>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/gi;

export default function parseAttachmentIds(
text: string,

@@ -14,17 +14,13 @@ const UPDATES_KEY = "UPDATES_KEY";
export async function checkUpdates() {
const secret = env.SECRET_KEY.slice(0, 6) + env.URL;
const id = crypto.createHash("sha256").update(secret).digest("hex");
const [
userCount,
teamCount,
collectionCount,
documentCount,
] = await Promise.all([
User.count(),
Team.count(),
Collection.count(),
Document.count(),
]);
const [userCount, teamCount, collectionCount, documentCount] =
await Promise.all([
User.count(),
Team.count(),
Collection.count(),
Document.count(),
]);
const body = JSON.stringify({
id,
version: 1,

@@ -18,7 +18,7 @@ export function CannotUseWithout(
options: validationOptions,
validator: {
validate<T>(value: T, args: ValidationArguments) {
const object = (args.object as unknown) as T;
const object = args.object as unknown as T;
const required = args.constraints[0] as string;
return object[required] !== undefined;
},