From 74722b80f2b71703340297ceb2c36104bcd2ca87 Mon Sep 17 00:00:00 2001 From: Tom Moor Date: Sun, 20 Aug 2023 10:04:34 -0400 Subject: [PATCH] chore: Refactor file storage (#5711) --- plugins/iframely/server/iframely.ts | 2 +- server/collaboration/PersistenceExtension.ts | 2 +- server/commands/attachmentCreator.ts | 4 +- server/commands/collectionExporter.ts | 9 +- .../commands/documentCollaborativeUpdater.ts | 2 +- server/commands/documentImporter.test.ts | 2 +- server/commands/documentMover.test.ts | 2 +- server/commands/documentPermanentDeleter.ts | 2 +- server/commands/documentUpdater.test.ts | 2 +- server/commands/fileOperationDeleter.ts | 2 +- server/commands/notificationUpdater.test.ts | 2 +- server/commands/pinCreator.ts | 2 +- server/commands/pinUpdater.ts | 2 +- server/commands/revisionCreator.ts | 2 +- server/commands/starCreator.test.ts | 2 +- server/commands/starDestroyer.ts | 2 +- server/commands/starUpdater.ts | 2 +- server/commands/subscriptionCreator.test.ts | 2 +- server/commands/subscriptionCreator.ts | 2 +- server/commands/subscriptionDestroyer.test.ts | 2 +- server/commands/teamPermanentDeleter.ts | 2 +- server/commands/teamProvisioner.ts | 2 +- server/commands/userDemoter.ts | 2 +- server/commands/userDestroyer.ts | 2 +- server/commands/userProvisioner.ts | 2 +- server/commands/userSuspender.ts | 2 +- server/commands/userUnsuspender.ts | 2 +- server/env.ts | 47 +++- server/index.ts | 4 +- server/middlewares/rateLimiter.ts | 2 +- server/middlewares/transaction.ts | 2 +- server/models/Attachment.ts | 18 +- server/models/FileOperation.ts | 8 +- server/models/decorators/Encrypted.ts | 2 +- server/models/helpers/DocumentHelper.tsx | 7 +- server/models/helpers/SearchHelper.ts | 2 +- server/queues/tasks/CleanupDemotedUserTask.ts | 2 +- .../tasks/CommentCreatedNotificationsTask.ts | 2 +- .../tasks/DetachDraftsFromCollectionTask.ts | 2 +- server/queues/tasks/ExportTask.ts | 4 +- server/queues/tasks/ImportTask.ts | 5 +- server/queues/tasks/InviteReminderTask.ts | 2 +- server/queues/tasks/UploadTeamAvatarTask.ts | 4 +- server/queues/tasks/UploadUserAvatarTask.ts | 4 +- server/queues/tasks/ValidateSSOAccessTask.ts | 2 +- .../api/attachments/attachments.test.ts | 2 +- server/routes/api/attachments/attachments.ts | 6 +- .../api/fileOperations/fileOperations.test.ts | 2 +- .../api/fileOperations/fileOperations.ts | 4 +- ...313000000-migrate-notification-settings.ts | 2 +- server/scripts/bootstrap.ts | 2 +- server/scripts/seed.ts | 2 +- server/services/websockets.ts | 2 +- .../sequelize.ts => storage/database.ts} | 0 server/storage/files/BaseStorage.ts | 99 +++++++ server/storage/files/S3Storage.ts | 254 ++++++++++++++++++ server/storage/files/__mocks__/index.ts | 9 + server/storage/files/index.ts | 3 + server/{ => storage}/redis.ts | 0 server/{database => storage}/vaults.ts | 0 server/test/setup.ts | 4 +- server/test/support.ts | 2 +- server/utils/RateLimiter.ts | 2 +- server/utils/__mocks__/s3.ts | 9 - server/utils/queue.ts | 2 +- server/utils/s3.ts | 217 --------------- server/utils/startup.ts | 2 +- server/utils/updates.ts | 2 +- 68 files changed, 496 insertions(+), 313 deletions(-) rename server/{database/sequelize.ts => storage/database.ts} (100%) create mode 100644 server/storage/files/BaseStorage.ts create mode 100644 server/storage/files/S3Storage.ts create mode 100644 server/storage/files/__mocks__/index.ts create mode 100644 server/storage/files/index.ts rename server/{ => storage}/redis.ts (100%) rename server/{database => storage}/vaults.ts (100%) delete mode 100644 
server/utils/__mocks__/s3.ts delete mode 100644 server/utils/s3.ts diff --git a/plugins/iframely/server/iframely.ts b/plugins/iframely/server/iframely.ts index 081b4fec1..68dbc7982 100644 --- a/plugins/iframely/server/iframely.ts +++ b/plugins/iframely/server/iframely.ts @@ -2,7 +2,7 @@ import fetch from "fetch-with-proxy"; import env from "@server/env"; import { InternalError } from "@server/errors"; import Logger from "@server/logging/Logger"; -import Redis from "@server/redis"; +import Redis from "@server/storage/redis"; class Iframely { private static apiUrl = `${env.IFRAMELY_URL}/api`; diff --git a/server/collaboration/PersistenceExtension.ts b/server/collaboration/PersistenceExtension.ts index f47e27722..93e7ce66b 100644 --- a/server/collaboration/PersistenceExtension.ts +++ b/server/collaboration/PersistenceExtension.ts @@ -5,11 +5,11 @@ import { Extension, } from "@hocuspocus/server"; import * as Y from "yjs"; -import { sequelize } from "@server/database/sequelize"; import Logger from "@server/logging/Logger"; import { trace } from "@server/logging/tracing"; import Document from "@server/models/Document"; import ProsemirrorHelper from "@server/models/helpers/ProsemirrorHelper"; +import { sequelize } from "@server/storage/database"; import documentCollaborativeUpdater from "../commands/documentCollaborativeUpdater"; @trace() diff --git a/server/commands/attachmentCreator.ts b/server/commands/attachmentCreator.ts index e10ec8a0e..76bb4dd5c 100644 --- a/server/commands/attachmentCreator.ts +++ b/server/commands/attachmentCreator.ts @@ -1,7 +1,7 @@ import { Transaction } from "sequelize"; import { v4 as uuidv4 } from "uuid"; import { Attachment, Event, User } from "@server/models"; -import { uploadToS3 } from "@server/utils/s3"; +import FileStorage from "@server/storage/files"; export default async function attachmentCreator({ id, @@ -24,7 +24,7 @@ export default async function attachmentCreator({ }) { const key = `uploads/${user.id}/${uuidv4()}/${name}`; const acl = process.env.AWS_S3_ACL || "private"; - const url = await uploadToS3({ + const url = await FileStorage.upload({ body: buffer, contentType: type, contentLength: buffer.length, diff --git a/server/commands/collectionExporter.ts b/server/commands/collectionExporter.ts index 1e884082a..b8e14a826 100644 --- a/server/commands/collectionExporter.ts +++ b/server/commands/collectionExporter.ts @@ -1,4 +1,5 @@ import { Transaction } from "sequelize"; +import { v4 as uuidv4 } from "uuid"; import { FileOperationFormat, FileOperationType, @@ -6,7 +7,6 @@ import { } from "@shared/types"; import { traceFunction } from "@server/logging/tracing"; import { Collection, Event, Team, User, FileOperation } from "@server/models"; -import { getAWSKeyForFileOp } from "@server/utils/s3"; type Props = { collection?: Collection; @@ -18,6 +18,11 @@ type Props = { transaction: Transaction; }; +function getKeyForFileOp(teamId: string, name: string) { + const bucket = "uploads"; + return `${bucket}/${teamId}/${uuidv4()}/${name}-export.zip`; +} + async function collectionExporter({ collection, team, @@ -28,7 +33,7 @@ async function collectionExporter({ transaction, }: Props) { const collectionId = collection?.id; - const key = getAWSKeyForFileOp(user.teamId, collection?.name || team.name); + const key = getKeyForFileOp(user.teamId, collection?.name || team.name); const fileOperation = await FileOperation.create( { type: FileOperationType.Export, diff --git a/server/commands/documentCollaborativeUpdater.ts b/server/commands/documentCollaborativeUpdater.ts 
index 64df3c30b..da7c84878 100644 --- a/server/commands/documentCollaborativeUpdater.ts +++ b/server/commands/documentCollaborativeUpdater.ts @@ -2,10 +2,10 @@ import { yDocToProsemirrorJSON } from "@getoutline/y-prosemirror"; import uniq from "lodash/uniq"; import { Node } from "prosemirror-model"; import * as Y from "yjs"; -import { sequelize } from "@server/database/sequelize"; import { schema, serializer } from "@server/editor"; import Logger from "@server/logging/Logger"; import { Document, Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; type Props = { /** The document ID to update */ diff --git a/server/commands/documentImporter.test.ts b/server/commands/documentImporter.test.ts index d0c2810d2..cc83c8426 100644 --- a/server/commands/documentImporter.test.ts +++ b/server/commands/documentImporter.test.ts @@ -5,7 +5,7 @@ import { buildUser } from "@server/test/factories"; import { setupTestDatabase } from "@server/test/support"; import documentImporter from "./documentImporter"; -jest.mock("../utils/s3"); +jest.mock("@server/storage/files"); setupTestDatabase(); diff --git a/server/commands/documentMover.test.ts b/server/commands/documentMover.test.ts index 2395a1c7b..6e8f0da33 100644 --- a/server/commands/documentMover.test.ts +++ b/server/commands/documentMover.test.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import Pin from "@server/models/Pin"; +import { sequelize } from "@server/storage/database"; import { buildDocument, buildCollection } from "@server/test/factories"; import { setupTestDatabase, seed } from "@server/test/support"; import documentMover from "./documentMover"; diff --git a/server/commands/documentPermanentDeleter.ts b/server/commands/documentPermanentDeleter.ts index 5de4ab3f5..a0c4a23b3 100644 --- a/server/commands/documentPermanentDeleter.ts +++ b/server/commands/documentPermanentDeleter.ts @@ -1,9 +1,9 @@ import uniq from "lodash/uniq"; import { QueryTypes } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import Logger from "@server/logging/Logger"; import { Document, Attachment } from "@server/models"; import DeleteAttachmentTask from "@server/queues/tasks/DeleteAttachmentTask"; +import { sequelize } from "@server/storage/database"; import parseAttachmentIds from "@server/utils/parseAttachmentIds"; export default async function documentPermanentDeleter(documents: Document[]) { diff --git a/server/commands/documentUpdater.test.ts b/server/commands/documentUpdater.test.ts index 669d89cf5..259615353 100644 --- a/server/commands/documentUpdater.test.ts +++ b/server/commands/documentUpdater.test.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { buildDocument, buildUser } from "@server/test/factories"; import { setupTestDatabase } from "@server/test/support"; import documentUpdater from "./documentUpdater"; diff --git a/server/commands/fileOperationDeleter.ts b/server/commands/fileOperationDeleter.ts index e45af66bb..c891af271 100644 --- a/server/commands/fileOperationDeleter.ts +++ b/server/commands/fileOperationDeleter.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { FileOperation, Event, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; export default async function fileOperationDeleter( fileOperation: FileOperation, diff --git 
a/server/commands/notificationUpdater.test.ts b/server/commands/notificationUpdater.test.ts index c6a9672aa..8dc746be7 100644 --- a/server/commands/notificationUpdater.test.ts +++ b/server/commands/notificationUpdater.test.ts @@ -1,6 +1,6 @@ import { NotificationEventType } from "@shared/types"; -import { sequelize } from "@server/database/sequelize"; import { Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { buildUser, buildNotification, diff --git a/server/commands/pinCreator.ts b/server/commands/pinCreator.ts index d7f684592..cab9caa5c 100644 --- a/server/commands/pinCreator.ts +++ b/server/commands/pinCreator.ts @@ -1,9 +1,9 @@ import fractionalIndex from "fractional-index"; import { Sequelize, Op, WhereOptions } from "sequelize"; import { PinValidation } from "@shared/validations"; -import { sequelize } from "@server/database/sequelize"; import { ValidationError } from "@server/errors"; import { Pin, User, Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; type Props = { /** The user creating the pin */ diff --git a/server/commands/pinUpdater.ts b/server/commands/pinUpdater.ts index 9cb61ed39..e87173f48 100644 --- a/server/commands/pinUpdater.ts +++ b/server/commands/pinUpdater.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { Event, Pin, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; type Props = { /** The user updating the pin */ diff --git a/server/commands/revisionCreator.ts b/server/commands/revisionCreator.ts index 962b8fe7f..69065e766 100644 --- a/server/commands/revisionCreator.ts +++ b/server/commands/revisionCreator.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { Document, User, Event, Revision } from "@server/models"; +import { sequelize } from "@server/storage/database"; export default async function revisionCreator({ document, diff --git a/server/commands/starCreator.test.ts b/server/commands/starCreator.test.ts index 21e121716..d91a13b0b 100644 --- a/server/commands/starCreator.test.ts +++ b/server/commands/starCreator.test.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { Star, Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { buildDocument, buildUser } from "@server/test/factories"; import { setupTestDatabase } from "@server/test/support"; import starCreator from "./starCreator"; diff --git a/server/commands/starDestroyer.ts b/server/commands/starDestroyer.ts index b8ff557d9..b46e5071a 100644 --- a/server/commands/starDestroyer.ts +++ b/server/commands/starDestroyer.ts @@ -1,6 +1,6 @@ import { Transaction } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import { Event, Star, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; type Props = { /** The user destroying the star */ diff --git a/server/commands/starUpdater.ts b/server/commands/starUpdater.ts index 0b5d48846..33152371b 100644 --- a/server/commands/starUpdater.ts +++ b/server/commands/starUpdater.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { Event, Star, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; type Props = { /** The user updating the star */ diff --git a/server/commands/subscriptionCreator.test.ts b/server/commands/subscriptionCreator.test.ts index 2cbeaf0db..d10772402 100644 --- 
a/server/commands/subscriptionCreator.test.ts +++ b/server/commands/subscriptionCreator.test.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { Subscription, Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { buildDocument, buildUser } from "@server/test/factories"; import { setupTestDatabase } from "@server/test/support"; import subscriptionCreator from "./subscriptionCreator"; diff --git a/server/commands/subscriptionCreator.ts b/server/commands/subscriptionCreator.ts index c5fe15804..4ad5e0a36 100644 --- a/server/commands/subscriptionCreator.ts +++ b/server/commands/subscriptionCreator.ts @@ -1,6 +1,6 @@ import { Transaction } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import { Subscription, Event, User, Document } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { DocumentEvent, RevisionEvent } from "@server/types"; type Props = { diff --git a/server/commands/subscriptionDestroyer.test.ts b/server/commands/subscriptionDestroyer.test.ts index 172994550..42bfe577a 100644 --- a/server/commands/subscriptionDestroyer.test.ts +++ b/server/commands/subscriptionDestroyer.test.ts @@ -1,5 +1,5 @@ -import { sequelize } from "@server/database/sequelize"; import { Subscription, Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { buildDocument, buildSubscription, diff --git a/server/commands/teamPermanentDeleter.ts b/server/commands/teamPermanentDeleter.ts index f5588ca37..53e83f30a 100644 --- a/server/commands/teamPermanentDeleter.ts +++ b/server/commands/teamPermanentDeleter.ts @@ -1,5 +1,4 @@ import { Transaction } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import Logger from "@server/logging/Logger"; import { traceFunction } from "@server/logging/tracing"; import { @@ -19,6 +18,7 @@ import { SearchQuery, Share, } from "@server/models"; +import { sequelize } from "@server/storage/database"; async function teamPermanentDeleter(team: Team) { if (!team.deletedAt) { diff --git a/server/commands/teamProvisioner.ts b/server/commands/teamProvisioner.ts index adbf4f6b0..38832b927 100644 --- a/server/commands/teamProvisioner.ts +++ b/server/commands/teamProvisioner.ts @@ -1,5 +1,4 @@ import teamCreator from "@server/commands/teamCreator"; -import { sequelize } from "@server/database/sequelize"; import env from "@server/env"; import { DomainNotAllowedError, @@ -8,6 +7,7 @@ import { } from "@server/errors"; import { traceFunction } from "@server/logging/tracing"; import { Team, AuthenticationProvider } from "@server/models"; +import { sequelize } from "@server/storage/database"; type TeamProvisionerResult = { team: Team; diff --git a/server/commands/userDemoter.ts b/server/commands/userDemoter.ts index 10ac27b12..296fd825f 100644 --- a/server/commands/userDemoter.ts +++ b/server/commands/userDemoter.ts @@ -1,8 +1,8 @@ -import { sequelize } from "@server/database/sequelize"; import { ValidationError } from "@server/errors"; import { Event, User } from "@server/models"; import type { UserRole } from "@server/models/User"; import CleanupDemotedUserTask from "@server/queues/tasks/CleanupDemotedUserTask"; +import { sequelize } from "@server/storage/database"; type Props = { user: User; diff --git a/server/commands/userDestroyer.ts b/server/commands/userDestroyer.ts index 182ae03e3..87741d0c2 100644 --- a/server/commands/userDestroyer.ts +++ b/server/commands/userDestroyer.ts @@ -1,6 +1,6 @@ 
import { Op } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import { Event, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { ValidationError } from "../errors"; export default async function userDestroyer({ diff --git a/server/commands/userProvisioner.ts b/server/commands/userProvisioner.ts index bc5e2c49b..cf8f3f47b 100644 --- a/server/commands/userProvisioner.ts +++ b/server/commands/userProvisioner.ts @@ -1,4 +1,3 @@ -import { sequelize } from "@server/database/sequelize"; import InviteAcceptedEmail from "@server/emails/templates/InviteAcceptedEmail"; import { DomainNotAllowedError, @@ -6,6 +5,7 @@ import { InviteRequiredError, } from "@server/errors"; import { Event, Team, User, UserAuthentication } from "@server/models"; +import { sequelize } from "@server/storage/database"; type UserProvisionerResult = { user: User; diff --git a/server/commands/userSuspender.ts b/server/commands/userSuspender.ts index c2fe52039..03c7b5c54 100644 --- a/server/commands/userSuspender.ts +++ b/server/commands/userSuspender.ts @@ -1,7 +1,7 @@ import { Transaction } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import { User, Event, GroupUser } from "@server/models"; import CleanupDemotedUserTask from "@server/queues/tasks/CleanupDemotedUserTask"; +import { sequelize } from "@server/storage/database"; import { ValidationError } from "../errors"; type Props = { diff --git a/server/commands/userUnsuspender.ts b/server/commands/userUnsuspender.ts index c95aa9d3c..ba13a5273 100644 --- a/server/commands/userUnsuspender.ts +++ b/server/commands/userUnsuspender.ts @@ -1,6 +1,6 @@ import { Transaction } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import { User, Event } from "@server/models"; +import { sequelize } from "@server/storage/database"; import { ValidationError } from "../errors"; type Props = { diff --git a/server/env.ts b/server/env.ts index 11f2af6f1..d8ff86de6 100644 --- a/server/env.ts +++ b/server/env.ts @@ -568,10 +568,35 @@ export class Environment { public AWS_S3_UPLOAD_MAX_SIZE = this.toOptionalNumber(process.env.AWS_S3_UPLOAD_MAX_SIZE) ?? 100000000; + /** + * Access key ID for AWS S3. + */ + @IsOptional() + public AWS_ACCESS_KEY_ID = this.toOptionalString( + process.env.AWS_ACCESS_KEY_ID + ); + + /** + * Secret key for AWS S3. + */ + @IsOptional() + @CannotUseWithout("AWS_ACCESS_KEY_ID") + public AWS_SECRET_ACCESS_KEY = this.toOptionalString( + process.env.AWS_SECRET_ACCESS_KEY + ); + + /** + * The name of the AWS S3 region to use. + */ + @IsOptional() + @CannotUseWithout("AWS_ACCESS_KEY_ID") + public AWS_REGION = this.toOptionalString(process.env.AWS_REGION); + /** * Optional AWS S3 endpoint URL for file attachments. */ @IsOptional() + @CannotUseWithout("AWS_ACCESS_KEY_ID") public AWS_S3_ACCELERATE_URL = this.toOptionalString( process.env.AWS_S3_ACCELERATE_URL ); @@ -580,8 +605,26 @@ export class Environment { * Optional AWS S3 endpoint URL for file attachments. */ @IsOptional() - public AWS_S3_UPLOAD_BUCKET_URL = this.toOptionalString( - process.env.AWS_S3_UPLOAD_BUCKET_URL + @CannotUseWithout("AWS_ACCESS_KEY_ID") + public AWS_S3_UPLOAD_BUCKET_URL = process.env.AWS_S3_UPLOAD_BUCKET_URL ?? ""; + + /** + * The bucket name to store file attachments in. 
+ */ + @IsOptional() + @CannotUseWithout("AWS_ACCESS_KEY_ID") + public AWS_S3_UPLOAD_BUCKET_NAME = this.toOptionalString( + process.env.AWS_S3_UPLOAD_BUCKET_NAME + ); + + /** + * Whether to force path style URLs for S3 objects, this is required for some + * S3-compatible storage providers. + */ + @IsOptional() + @CannotUseWithout("AWS_ACCESS_KEY_ID") + public AWS_S3_FORCE_PATH_STYLE = this.toBoolean( + process.env.AWS_S3_FORCE_PATH_STYLE ?? "true" ); /** diff --git a/server/index.ts b/server/index.ts index 05ea9e80d..781410bb5 100644 --- a/server/index.ts +++ b/server/index.ts @@ -23,8 +23,8 @@ import { checkEnv, checkPendingMigrations } from "./utils/startup"; import { checkUpdates } from "./utils/updates"; import onerror from "./onerror"; import ShutdownHelper, { ShutdownOrder } from "./utils/ShutdownHelper"; -import { sequelize } from "./database/sequelize"; -import RedisAdapter from "./redis"; +import { sequelize } from "./storage/database"; +import RedisAdapter from "./storage/redis"; import Metrics from "./logging/Metrics"; // The default is to run all services to make development and OSS installations diff --git a/server/middlewares/rateLimiter.ts b/server/middlewares/rateLimiter.ts index 3bc620a6d..3d418bb21 100644 --- a/server/middlewares/rateLimiter.ts +++ b/server/middlewares/rateLimiter.ts @@ -4,7 +4,7 @@ import env from "@server/env"; import { RateLimitExceededError } from "@server/errors"; import Logger from "@server/logging/Logger"; import Metrics from "@server/logging/Metrics"; -import Redis from "@server/redis"; +import Redis from "@server/storage/redis"; import RateLimiter from "@server/utils/RateLimiter"; /** diff --git a/server/middlewares/transaction.ts b/server/middlewares/transaction.ts index 58ca1b434..78d6d00c5 100644 --- a/server/middlewares/transaction.ts +++ b/server/middlewares/transaction.ts @@ -1,6 +1,6 @@ import { Next } from "koa"; import { Transaction } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; +import { sequelize } from "@server/storage/database"; import { AppContext } from "@server/types"; /** diff --git a/server/models/Attachment.ts b/server/models/Attachment.ts index f4ad3361c..fe1518a95 100644 --- a/server/models/Attachment.ts +++ b/server/models/Attachment.ts @@ -10,13 +10,7 @@ import { DataType, IsNumeric, } from "sequelize-typescript"; -import { - publicS3Endpoint, - deleteFromS3, - getFileStream, - getSignedUrl, - getFileBuffer, -} from "@server/utils/s3"; +import FileStorage from "@server/storage/files"; import Document from "./Document"; import Team from "./Team"; import User from "./User"; @@ -76,14 +70,14 @@ class Attachment extends IdModel { * Get the contents of this attachment as a readable stream. */ get stream() { - return getFileStream(this.key); + return FileStorage.getFileStream(this.key); } /** * Get the contents of this attachment as a buffer. */ get buffer() { - return getFileBuffer(this.key); + return FileStorage.getFileBuffer(this.key); } /** @@ -105,21 +99,21 @@ class Attachment extends IdModel { * a signed URL must be used. */ get canonicalUrl() { - return encodeURI(`${publicS3Endpoint()}/${this.key}`); + return encodeURI(`${FileStorage.getPublicEndpoint()}/${this.key}`); } /** * Get a signed URL with the default expirt to download the attachment from storage. 
*/ get signedUrl() { - return getSignedUrl(this.key); + return FileStorage.getSignedUrl(this.key); } // hooks @BeforeDestroy static async deleteAttachmentFromS3(model: Attachment) { - await deleteFromS3(model.key); + await FileStorage.deleteFile(model.key); } // associations diff --git a/server/models/FileOperation.ts b/server/models/FileOperation.ts index 43ee87261..9ad8603b1 100644 --- a/server/models/FileOperation.ts +++ b/server/models/FileOperation.ts @@ -13,7 +13,7 @@ import { FileOperationState, FileOperationType, } from "@shared/types"; -import { deleteFromS3, getFileStream } from "@server/utils/s3"; +import FileStorage from "@server/storage/files"; import Collection from "./Collection"; import Team from "./Team"; import User from "./User"; @@ -67,7 +67,7 @@ class FileOperation extends IdModel { expire = async function () { this.state = FileOperationState.Expired; try { - await deleteFromS3(this.key); + await FileStorage.deleteFile(this.key); } catch (err) { if (err.retryable) { throw err; @@ -80,14 +80,14 @@ class FileOperation extends IdModel { * The file operation contents as a readable stream. */ get stream() { - return getFileStream(this.key); + return FileStorage.getFileStream(this.key); } // hooks @BeforeDestroy static async deleteFileFromS3(model: FileOperation) { - await deleteFromS3(model.key); + await FileStorage.deleteFile(model.key); } // associations diff --git a/server/models/decorators/Encrypted.ts b/server/models/decorators/Encrypted.ts index 02ab7d382..ad24935b8 100644 --- a/server/models/decorators/Encrypted.ts +++ b/server/models/decorators/Encrypted.ts @@ -1,6 +1,6 @@ import isNil from "lodash/isNil"; -import vaults from "@server/database/vaults"; import Logger from "@server/logging/Logger"; +import vaults from "@server/storage/vaults"; const key = "sequelize:vault"; diff --git a/server/models/helpers/DocumentHelper.tsx b/server/models/helpers/DocumentHelper.tsx index 358041a63..459195b98 100644 --- a/server/models/helpers/DocumentHelper.tsx +++ b/server/models/helpers/DocumentHelper.tsx @@ -19,9 +19,9 @@ import { trace } from "@server/logging/tracing"; import type Document from "@server/models/Document"; import type Revision from "@server/models/Revision"; import User from "@server/models/User"; +import FileStorage from "@server/storage/files"; import diff from "@server/utils/diff"; import parseAttachmentIds from "@server/utils/parseAttachmentIds"; -import { getSignedUrl } from "@server/utils/s3"; import Attachment from "../Attachment"; import ProsemirrorHelper from "./ProsemirrorHelper"; @@ -324,7 +324,10 @@ export default class DocumentHelper { }); if (attachment) { - const signedUrl = await getSignedUrl(attachment.key, expiresIn); + const signedUrl = await FileStorage.getSignedUrl( + attachment.key, + expiresIn + ); text = text.replace( new RegExp(escapeRegExp(attachment.redirectUrl), "g"), signedUrl diff --git a/server/models/helpers/SearchHelper.ts b/server/models/helpers/SearchHelper.ts index 5ecee4f74..8cedb39d7 100644 --- a/server/models/helpers/SearchHelper.ts +++ b/server/models/helpers/SearchHelper.ts @@ -5,12 +5,12 @@ import map from "lodash/map"; import queryParser from "pg-tsquery"; import { Op, QueryTypes, WhereOptions } from "sequelize"; import { DateFilter } from "@shared/types"; -import { sequelize } from "@server/database/sequelize"; import Collection from "@server/models/Collection"; import Document from "@server/models/Document"; import Share from "@server/models/Share"; import Team from "@server/models/Team"; import User from 
"@server/models/User"; +import { sequelize } from "@server/storage/database"; type SearchResponse = { results: { diff --git a/server/queues/tasks/CleanupDemotedUserTask.ts b/server/queues/tasks/CleanupDemotedUserTask.ts index bf70eb39a..6ea96e14a 100644 --- a/server/queues/tasks/CleanupDemotedUserTask.ts +++ b/server/queues/tasks/CleanupDemotedUserTask.ts @@ -1,6 +1,6 @@ -import { sequelize } from "@server/database/sequelize"; import Logger from "@server/logging/Logger"; import { WebhookSubscription, ApiKey, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; import BaseTask from "./BaseTask"; type Props = { diff --git a/server/queues/tasks/CommentCreatedNotificationsTask.ts b/server/queues/tasks/CommentCreatedNotificationsTask.ts index 008bcd4a7..d2903794c 100644 --- a/server/queues/tasks/CommentCreatedNotificationsTask.ts +++ b/server/queues/tasks/CommentCreatedNotificationsTask.ts @@ -1,9 +1,9 @@ import { NotificationEventType } from "@shared/types"; import subscriptionCreator from "@server/commands/subscriptionCreator"; -import { sequelize } from "@server/database/sequelize"; import { Comment, Document, Notification, User } from "@server/models"; import NotificationHelper from "@server/models/helpers/NotificationHelper"; import ProsemirrorHelper from "@server/models/helpers/ProsemirrorHelper"; +import { sequelize } from "@server/storage/database"; import { CommentEvent } from "@server/types"; import BaseTask, { TaskPriority } from "./BaseTask"; diff --git a/server/queues/tasks/DetachDraftsFromCollectionTask.ts b/server/queues/tasks/DetachDraftsFromCollectionTask.ts index 9f2d6ca86..417d37b52 100644 --- a/server/queues/tasks/DetachDraftsFromCollectionTask.ts +++ b/server/queues/tasks/DetachDraftsFromCollectionTask.ts @@ -1,7 +1,7 @@ import { Op } from "sequelize"; import documentMover from "@server/commands/documentMover"; -import { sequelize } from "@server/database/sequelize"; import { Collection, Document, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; import BaseTask from "./BaseTask"; type Props = { diff --git a/server/queues/tasks/ExportTask.ts b/server/queues/tasks/ExportTask.ts index c24ffed59..8aca07483 100644 --- a/server/queues/tasks/ExportTask.ts +++ b/server/queues/tasks/ExportTask.ts @@ -6,7 +6,7 @@ import ExportSuccessEmail from "@server/emails/templates/ExportSuccessEmail"; import Logger from "@server/logging/Logger"; import { Collection, Event, FileOperation, Team, User } from "@server/models"; import fileOperationPresenter from "@server/presenters/fileOperation"; -import { uploadToS3 } from "@server/utils/s3"; +import FileStorage from "@server/storage/files"; import BaseTask, { TaskPriority } from "./BaseTask"; type Props = { @@ -60,7 +60,7 @@ export default abstract class ExportTask extends BaseTask { }); const stat = await fs.promises.stat(filePath); - const url = await uploadToS3({ + const url = await FileStorage.upload({ body: fs.createReadStream(filePath), contentLength: stat.size, contentType: "application/zip", diff --git a/server/queues/tasks/ImportTask.ts b/server/queues/tasks/ImportTask.ts index 6b818f837..d39db2148 100644 --- a/server/queues/tasks/ImportTask.ts +++ b/server/queues/tasks/ImportTask.ts @@ -1,4 +1,3 @@ -import { S3 } from "aws-sdk"; import truncate from "lodash/truncate"; import { CollectionPermission, @@ -8,7 +7,6 @@ import { import { CollectionValidation } from "@shared/validations"; import attachmentCreator from "@server/commands/attachmentCreator"; import documentCreator 
from "@server/commands/documentCreator"; -import { sequelize } from "@server/database/sequelize"; import { serializer } from "@server/editor"; import { InternalError, ValidationError } from "@server/errors"; import Logger from "@server/logging/Logger"; @@ -20,6 +18,7 @@ import { FileOperation, Attachment, } from "@server/models"; +import { sequelize } from "@server/storage/database"; import BaseTask, { TaskPriority } from "./BaseTask"; type Props = { @@ -207,7 +206,7 @@ export default abstract class ImportTask extends BaseTask { * @returns A promise that resolves to the structured data */ protected abstract parseData( - data: S3.Body, + data: Buffer | NodeJS.ReadableStream, fileOperation: FileOperation ): Promise; diff --git a/server/queues/tasks/InviteReminderTask.ts b/server/queues/tasks/InviteReminderTask.ts index 282340d4f..91fb0e64a 100644 --- a/server/queues/tasks/InviteReminderTask.ts +++ b/server/queues/tasks/InviteReminderTask.ts @@ -1,9 +1,9 @@ import { subDays } from "date-fns"; import { Op } from "sequelize"; -import { sequelize } from "@server/database/sequelize"; import InviteReminderEmail from "@server/emails/templates/InviteReminderEmail"; import { User } from "@server/models"; import { UserFlag } from "@server/models/User"; +import { sequelize } from "@server/storage/database"; import BaseTask, { TaskPriority, TaskSchedule } from "./BaseTask"; type Props = undefined; diff --git a/server/queues/tasks/UploadTeamAvatarTask.ts b/server/queues/tasks/UploadTeamAvatarTask.ts index 17d17ec53..33098a350 100644 --- a/server/queues/tasks/UploadTeamAvatarTask.ts +++ b/server/queues/tasks/UploadTeamAvatarTask.ts @@ -1,6 +1,6 @@ import { v4 as uuidv4 } from "uuid"; import { Team } from "@server/models"; -import { uploadToS3FromUrl } from "@server/utils/s3"; +import FileStorage from "@server/storage/files"; import BaseTask, { TaskPriority } from "./BaseTask"; type Props = { @@ -20,7 +20,7 @@ export default class UploadTeamAvatarTask extends BaseTask { rejectOnEmpty: true, }); - const avatarUrl = await uploadToS3FromUrl( + const avatarUrl = await FileStorage.uploadFromUrl( props.avatarUrl, `avatars/${team.id}/${uuidv4()}`, "public-read" diff --git a/server/queues/tasks/UploadUserAvatarTask.ts b/server/queues/tasks/UploadUserAvatarTask.ts index 6e4b0f4c7..f52ae53cd 100644 --- a/server/queues/tasks/UploadUserAvatarTask.ts +++ b/server/queues/tasks/UploadUserAvatarTask.ts @@ -1,6 +1,6 @@ import { v4 as uuidv4 } from "uuid"; import { User } from "@server/models"; -import { uploadToS3FromUrl } from "@server/utils/s3"; +import FileStorage from "@server/storage/files"; import BaseTask, { TaskPriority } from "./BaseTask"; type Props = { @@ -20,7 +20,7 @@ export default class UploadUserAvatarTask extends BaseTask { rejectOnEmpty: true, }); - const avatarUrl = await uploadToS3FromUrl( + const avatarUrl = await FileStorage.uploadFromUrl( props.avatarUrl, `avatars/${user.id}/${uuidv4()}`, "public-read" diff --git a/server/queues/tasks/ValidateSSOAccessTask.ts b/server/queues/tasks/ValidateSSOAccessTask.ts index 2e4398a01..41f35a88f 100644 --- a/server/queues/tasks/ValidateSSOAccessTask.ts +++ b/server/queues/tasks/ValidateSSOAccessTask.ts @@ -1,6 +1,6 @@ -import { sequelize } from "@server/database/sequelize"; import Logger from "@server/logging/Logger"; import { User, UserAuthentication } from "@server/models"; +import { sequelize } from "@server/storage/database"; import BaseTask, { TaskPriority } from "./BaseTask"; type Props = { diff --git a/server/routes/api/attachments/attachments.test.ts 
b/server/routes/api/attachments/attachments.test.ts index 2cd0879b9..d8e4c6e70 100644 --- a/server/routes/api/attachments/attachments.test.ts +++ b/server/routes/api/attachments/attachments.test.ts @@ -11,7 +11,7 @@ import { } from "@server/test/factories"; import { getTestServer } from "@server/test/support"; -jest.mock("@server/utils/s3"); +jest.mock("@server/storage/files"); const server = getTestServer(); diff --git a/server/routes/api/attachments/attachments.ts b/server/routes/api/attachments/attachments.ts index a908c0b58..d00a84ce4 100644 --- a/server/routes/api/attachments/attachments.ts +++ b/server/routes/api/attachments/attachments.ts @@ -12,9 +12,9 @@ import { Attachment, Document, Event } from "@server/models"; import AttachmentHelper from "@server/models/helpers/AttachmentHelper"; import { authorize } from "@server/policies"; import { presentAttachment } from "@server/presenters"; +import FileStorage from "@server/storage/files"; import { APIContext } from "@server/types"; import { RateLimiterStrategy } from "@server/utils/RateLimiter"; -import { getPresignedPost, publicS3Endpoint } from "@server/utils/s3"; import { assertIn } from "@server/validation"; import * as T from "./schema"; @@ -90,7 +90,7 @@ router.post( { transaction } ); - const presignedPost = await getPresignedPost( + const presignedPost = await FileStorage.getPresignedPost( key, acl, maxUploadSize, @@ -99,7 +99,7 @@ router.post( ctx.body = { data: { - uploadUrl: publicS3Endpoint(), + uploadUrl: FileStorage.getPublicEndpoint(), form: { "Cache-Control": "max-age=31557600", "Content-Type": contentType, diff --git a/server/routes/api/fileOperations/fileOperations.test.ts b/server/routes/api/fileOperations/fileOperations.test.ts index 2c0dbb216..4b0d0f365 100644 --- a/server/routes/api/fileOperations/fileOperations.test.ts +++ b/server/routes/api/fileOperations/fileOperations.test.ts @@ -12,7 +12,7 @@ import { getTestServer } from "@server/test/support"; const server = getTestServer(); -jest.mock("@server/utils/s3"); +jest.mock("@server/storage/files"); describe("#fileOperations.info", () => { it("should return fileOperation", async () => { diff --git a/server/routes/api/fileOperations/fileOperations.ts b/server/routes/api/fileOperations/fileOperations.ts index 1fb0b2d04..f2a159363 100644 --- a/server/routes/api/fileOperations/fileOperations.ts +++ b/server/routes/api/fileOperations/fileOperations.ts @@ -7,8 +7,8 @@ import validate from "@server/middlewares/validate"; import { FileOperation, Team } from "@server/models"; import { authorize } from "@server/policies"; import { presentFileOperation } from "@server/presenters"; +import FileStorage from "@server/storage/files"; import { APIContext } from "@server/types"; -import { getSignedUrl } from "@server/utils/s3"; import pagination from "../middlewares/pagination"; import * as T from "./schema"; @@ -84,7 +84,7 @@ const handleFileOperationsRedirect = async ( throw ValidationError(`${fileOperation.type} is not complete yet`); } - const accessUrl = await getSignedUrl(fileOperation.key); + const accessUrl = await FileStorage.getSignedUrl(fileOperation.key); ctx.redirect(accessUrl); }; diff --git a/server/scripts/20230313000000-migrate-notification-settings.ts b/server/scripts/20230313000000-migrate-notification-settings.ts index 872907a7e..d8e8d6586 100644 --- a/server/scripts/20230313000000-migrate-notification-settings.ts +++ b/server/scripts/20230313000000-migrate-notification-settings.ts @@ -1,7 +1,7 @@ import "./bootstrap"; import { QueryTypes } from "sequelize"; 
-import { sequelize } from "@server/database/sequelize"; import { User } from "@server/models"; +import { sequelize } from "@server/storage/database"; const limit = 100; let page = parseInt(process.argv[2], 10); diff --git a/server/scripts/bootstrap.ts b/server/scripts/bootstrap.ts index f64a7a140..a7a6fac04 100644 --- a/server/scripts/bootstrap.ts +++ b/server/scripts/bootstrap.ts @@ -5,6 +5,6 @@ if (process.env.NODE_ENV !== "test") { }); } -require("../database/sequelize"); +require("../storage/database"); export {}; diff --git a/server/scripts/seed.ts b/server/scripts/seed.ts index 47eb9e803..8498f8a0e 100644 --- a/server/scripts/seed.ts +++ b/server/scripts/seed.ts @@ -1,8 +1,8 @@ import "./bootstrap"; import teamCreator from "@server/commands/teamCreator"; -import { sequelize } from "@server/database/sequelize"; import env from "@server/env"; import { Team, User } from "@server/models"; +import { sequelize } from "@server/storage/database"; const email = process.argv[2]; diff --git a/server/services/websockets.ts b/server/services/websockets.ts index b60e4c1e4..cff14985c 100644 --- a/server/services/websockets.ts +++ b/server/services/websockets.ts @@ -11,11 +11,11 @@ import * as Tracing from "@server/logging/tracer"; import { traceFunction } from "@server/logging/tracing"; import { Collection, User } from "@server/models"; import { can } from "@server/policies"; +import Redis from "@server/storage/redis"; import ShutdownHelper, { ShutdownOrder } from "@server/utils/ShutdownHelper"; import { getUserForJWT } from "@server/utils/jwt"; import { websocketQueue } from "../queues"; import WebsocketsProcessor from "../queues/processors/WebsocketsProcessor"; -import Redis from "../redis"; type SocketWithAuth = IO.Socket & { client: IO.Socket["client"] & { diff --git a/server/database/sequelize.ts b/server/storage/database.ts similarity index 100% rename from server/database/sequelize.ts rename to server/storage/database.ts diff --git a/server/storage/files/BaseStorage.ts b/server/storage/files/BaseStorage.ts new file mode 100644 index 000000000..2c36ff2ce --- /dev/null +++ b/server/storage/files/BaseStorage.ts @@ -0,0 +1,99 @@ +import { Readable } from "stream"; +import { PresignedPost } from "aws-sdk/clients/s3"; + +export default abstract class BaseStorage { + /** + * Returns a presigned post for uploading files to the storage provider. + * + * @param key The path to store the file at + * @param acl The ACL to use + * @param maxUploadSize The maximum upload size in bytes + * @param contentType The content type of the file + * @returns The presigned post object to use on the client (TODO: Abstract away from S3) + */ + public abstract getPresignedPost( + key: string, + acl: string, + maxUploadSize: number, + contentType: string + ): Promise<PresignedPost>; + + /** + * Returns a stream for reading a file from the storage provider. + * + * @param key The path to the file + */ + public abstract getFileStream(key: string): NodeJS.ReadableStream | null; + + /** + * Returns a buffer of a file from the storage provider. + * + * @param key The path to the file + */ + public abstract getFileBuffer(key: string): Promise<Blob>; + + /** + * Returns the public endpoint for the storage provider. + * + * @param isServerUpload Whether the upload is happening on the server or not + * @returns The public endpoint as a string + */ + public abstract getPublicEndpoint(isServerUpload?: boolean): string; + + /** + * Returns a signed URL for a file from the storage provider.
+ * + * @param key The path to the file + * @param expiresIn An optional number of seconds until the URL expires + */ + public abstract getSignedUrl( + key: string, + expiresIn?: number + ): Promise<string>; + + /** + * Upload a file to the storage provider. + * + * @param body The file body + * @param contentLength The content length of the file + * @param contentType The content type of the file + * @param key The path to store the file at + * @param acl The ACL to use + * @returns The URL of the file + */ + public abstract upload({ + body, + contentLength, + contentType, + key, + acl, + }: { + body: Buffer | Uint8Array | Blob | string | Readable; + contentLength: number; + contentType: string; + key: string; + acl: string; + }): Promise<string>; + + /** + * Upload a file to the storage provider directly from a remote URL. + * + * @param url The URL to upload from + * @param key The path to store the file at + * @param acl The ACL to use + * @returns The URL of the file + */ + public abstract uploadFromUrl( + url: string, + key: string, + acl: string + ): Promise<string | undefined>; + + /** + * Delete a file from the storage provider. + * + * @param key The path to the file + * @returns A promise that resolves when the file is deleted + */ + public abstract deleteFile(key: string): Promise<void>; +} diff --git a/server/storage/files/S3Storage.ts b/server/storage/files/S3Storage.ts new file mode 100644 index 000000000..fd672664b --- /dev/null +++ b/server/storage/files/S3Storage.ts @@ -0,0 +1,254 @@ +import util from "util"; +import AWS, { S3 } from "aws-sdk"; +import fetch from "fetch-with-proxy"; +import invariant from "invariant"; +import compact from "lodash/compact"; +import { useAgent } from "request-filtering-agent"; +import env from "@server/env"; +import Logger from "@server/logging/Logger"; +import BaseStorage from "./BaseStorage"; + +export default class S3Storage extends BaseStorage { + constructor() { + super(); + + this.client = new AWS.S3({ + s3BucketEndpoint: env.AWS_S3_ACCELERATE_URL ? true : undefined, + s3ForcePathStyle: env.AWS_S3_FORCE_PATH_STYLE, + accessKeyId: env.AWS_ACCESS_KEY_ID, + secretAccessKey: env.AWS_SECRET_ACCESS_KEY, + region: env.AWS_REGION, + endpoint: this.getEndpoint(), + signatureVersion: "v4", + }); + } + + public async getPresignedPost( + key: string, + acl: string, + maxUploadSize: number, + contentType = "image" + ) { + const params = { + Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME, + Conditions: compact([ + ["content-length-range", 0, maxUploadSize], + ["starts-with", "$Content-Type", contentType], + ["starts-with", "$Cache-Control", ""], + ]), + Fields: { + "Content-Disposition": "attachment", + key, + acl, + }, + Expires: 3600, + }; + + return util.promisify(this.client.createPresignedPost).bind(this.client)( + params + ); + } + + public getPublicEndpoint(isServerUpload?: boolean) { + if (env.AWS_S3_ACCELERATE_URL) { + return env.AWS_S3_ACCELERATE_URL; + } + invariant( + env.AWS_S3_UPLOAD_BUCKET_NAME, + "AWS_S3_UPLOAD_BUCKET_NAME is required" + ); + + // lose trailing slash if there is one and convert fake-s3 url to localhost + // for access outside of docker containers in local development + const isDocker = env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/); + + const host = env.AWS_S3_UPLOAD_BUCKET_URL.replace( + "s3:", + "localhost:" + ).replace(/\/$/, ""); + + // support old path-style S3 uploads and new virtual host uploads by checking + // for the bucket name in the endpoint url before appending.
+ const isVirtualHost = host.includes(env.AWS_S3_UPLOAD_BUCKET_NAME); + + if (isVirtualHost) { + return host; + } + + return `${host}/${isServerUpload && isDocker ? "s3/" : ""}${ + env.AWS_S3_UPLOAD_BUCKET_NAME + }`; + } + + public upload = async ({ + body, + contentLength, + contentType, + key, + acl, + }: { + body: S3.Body; + contentLength: number; + contentType: string; + key: string; + acl: string; + }) => { + invariant( + env.AWS_S3_UPLOAD_BUCKET_NAME, + "AWS_S3_UPLOAD_BUCKET_NAME is required" + ); + + await this.client + .putObject({ + ACL: acl, + Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + ContentType: contentType, + ContentLength: contentLength, + ContentDisposition: "attachment", + Body: body, + }) + .promise(); + const endpoint = this.getPublicEndpoint(true); + return `${endpoint}/${key}`; + }; + + public async uploadFromUrl(url: string, key: string, acl: string) { + invariant( + env.AWS_S3_UPLOAD_BUCKET_NAME, + "AWS_S3_UPLOAD_BUCKET_NAME is required" + ); + + const endpoint = this.getPublicEndpoint(true); + if (url.startsWith("/api") || url.startsWith(endpoint)) { + return; + } + + try { + const res = await fetch(url, { + agent: useAgent(url), + }); + const buffer = await res.buffer(); + await this.client + .putObject({ + ACL: acl, + Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + ContentType: res.headers["content-type"], + ContentLength: res.headers["content-length"], + ContentDisposition: "attachment", + Body: buffer, + }) + .promise(); + return `${endpoint}/${key}`; + } catch (err) { + Logger.error("Error uploading to S3 from URL", err, { + url, + key, + acl, + }); + return; + } + } + + public async deleteFile(key: string) { + invariant( + env.AWS_S3_UPLOAD_BUCKET_NAME, + "AWS_S3_UPLOAD_BUCKET_NAME is required" + ); + + await this.client + .deleteObject({ + Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + }) + .promise(); + } + + public getSignedUrl = async (key: string, expiresIn = 60) => { + const isDocker = env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/); + const params = { + Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + Expires: expiresIn, + ResponseContentDisposition: "attachment", + }; + + const url = isDocker + ? `${this.getPublicEndpoint()}/${key}` + : await this.client.getSignedUrlPromise("getObject", params); + + if (env.AWS_S3_ACCELERATE_URL) { + return url.replace( + env.AWS_S3_UPLOAD_BUCKET_URL, + env.AWS_S3_ACCELERATE_URL + ); + } + + return url; + }; + + public getFileStream(key: string) { + invariant( + env.AWS_S3_UPLOAD_BUCKET_NAME, + "AWS_S3_UPLOAD_BUCKET_NAME is required" + ); + + try { + return this.client + .getObject({ + Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + }) + .createReadStream(); + } catch (err) { + Logger.error("Error getting file stream from S3 ", err, { + key, + }); + } + + return null; + } + + public async getFileBuffer(key: string) { + invariant( + env.AWS_S3_UPLOAD_BUCKET_NAME, + "AWS_S3_UPLOAD_BUCKET_NAME is required" + ); + + const response = await this.client + .getObject({ + Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + }) + .promise(); + + if (response.Body) { + return response.Body as Blob; + } + + throw new Error("Error getting file buffer from S3"); + } + + private client: AWS.S3; + + private getEndpoint() { + if (env.AWS_S3_ACCELERATE_URL) { + return env.AWS_S3_ACCELERATE_URL; + } + + // support old path-style S3 uploads and new virtual host uploads by + // checking for the bucket name in the endpoint url. 
+ if ( + env.AWS_S3_UPLOAD_BUCKET_NAME && + env.AWS_S3_FORCE_PATH_STYLE === false + ) { + const url = new URL(env.AWS_S3_UPLOAD_BUCKET_URL); + if (url.hostname.startsWith(env.AWS_S3_UPLOAD_BUCKET_NAME + ".")) { + return undefined; + } + } + + return new AWS.Endpoint(env.AWS_S3_UPLOAD_BUCKET_URL); + } +} diff --git a/server/storage/files/__mocks__/index.ts b/server/storage/files/__mocks__/index.ts new file mode 100644 index 000000000..07ffc8db2 --- /dev/null +++ b/server/storage/files/__mocks__/index.ts @@ -0,0 +1,9 @@ +export default { + upload: jest.fn().mockReturnValue("/endpoint/key"), + + getPublicEndpoint: jest.fn().mockReturnValue("http://mock"), + + getSignedUrl: jest.fn().mockReturnValue("http://s3mock"), + + getPresignedPost: jest.fn().mockReturnValue({}), +}; diff --git a/server/storage/files/index.ts b/server/storage/files/index.ts new file mode 100644 index 000000000..c87b0b50c --- /dev/null +++ b/server/storage/files/index.ts @@ -0,0 +1,3 @@ +import S3Storage from "./S3Storage"; + +export default new S3Storage(); diff --git a/server/redis.ts b/server/storage/redis.ts similarity index 100% rename from server/redis.ts rename to server/storage/redis.ts diff --git a/server/database/vaults.ts b/server/storage/vaults.ts similarity index 100% rename from server/database/vaults.ts rename to server/storage/vaults.ts diff --git a/server/test/setup.ts b/server/test/setup.ts index b8ae95cbe..479fe4ca2 100644 --- a/server/test/setup.ts +++ b/server/test/setup.ts @@ -1,8 +1,8 @@ -import Redis from "@server/redis"; +import Redis from "@server/storage/redis"; // NOTE: this require must come after the ENV var override // so that sequelize uses the test config variables -require("@server/database/sequelize"); +require("@server/storage/database"); jest.mock("bull"); diff --git a/server/test/support.ts b/server/test/support.ts index 08c8a282e..e924ca8e7 100644 --- a/server/test/support.ts +++ b/server/test/support.ts @@ -1,10 +1,10 @@ import TestServer from "fetch-test-server"; import { v4 as uuidv4 } from "uuid"; import { CollectionPermission } from "@shared/types"; -import { sequelize } from "@server/database/sequelize"; import { User, Document, Collection, Team } from "@server/models"; import onerror from "@server/onerror"; import webService from "@server/services/web"; +import { sequelize } from "@server/storage/database"; export const seed = async () => sequelize.transaction(async (transaction) => { diff --git a/server/utils/RateLimiter.ts b/server/utils/RateLimiter.ts index f18eb5175..2dbe2915c 100644 --- a/server/utils/RateLimiter.ts +++ b/server/utils/RateLimiter.ts @@ -4,7 +4,7 @@ import { RateLimiterMemory, } from "rate-limiter-flexible"; import env from "@server/env"; -import Redis from "@server/redis"; +import Redis from "@server/storage/redis"; export default class RateLimiter { constructor() { diff --git a/server/utils/__mocks__/s3.ts b/server/utils/__mocks__/s3.ts deleted file mode 100644 index 59bfbedaa..000000000 --- a/server/utils/__mocks__/s3.ts +++ /dev/null @@ -1,9 +0,0 @@ -export const uploadToS3 = jest.fn().mockReturnValue("/endpoint/key"); - -export const publicS3Endpoint = jest.fn().mockReturnValue("http://mock"); - -export const getSignedUrl = jest.fn().mockReturnValue("http://s3mock"); - -export const getSignedUrlPromise = jest.fn().mockResolvedValue("http://s3mock"); - -export const getPresignedPost = jest.fn().mockReturnValue({}); diff --git a/server/utils/queue.ts b/server/utils/queue.ts index f325e3087..ef10d52a2 100644 --- a/server/utils/queue.ts +++ 
b/server/utils/queue.ts @@ -3,7 +3,7 @@ import snakeCase from "lodash/snakeCase"; import { Second } from "@shared/utils/time"; import env from "@server/env"; import Metrics from "@server/logging/Metrics"; -import Redis from "../redis"; +import Redis from "@server/storage/redis"; import ShutdownHelper, { ShutdownOrder } from "./ShutdownHelper"; export function createQueue( diff --git a/server/utils/s3.ts b/server/utils/s3.ts deleted file mode 100644 index 3dbcb3b39..000000000 --- a/server/utils/s3.ts +++ /dev/null @@ -1,217 +0,0 @@ -import util from "util"; -import AWS, { S3 } from "aws-sdk"; -import fetch from "fetch-with-proxy"; -import compact from "lodash/compact"; -import { useAgent } from "request-filtering-agent"; -import { v4 as uuidv4 } from "uuid"; -import Logger from "@server/logging/Logger"; - -const AWS_S3_ACCELERATE_URL = process.env.AWS_S3_ACCELERATE_URL; -const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY; -const AWS_S3_UPLOAD_BUCKET_URL = process.env.AWS_S3_UPLOAD_BUCKET_URL || ""; -const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID; -const AWS_REGION = process.env.AWS_REGION || ""; -const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME || ""; -const AWS_S3_FORCE_PATH_STYLE = process.env.AWS_S3_FORCE_PATH_STYLE !== "false"; - -const s3 = new AWS.S3({ - s3BucketEndpoint: AWS_S3_ACCELERATE_URL ? true : undefined, - s3ForcePathStyle: AWS_S3_FORCE_PATH_STYLE, - accessKeyId: AWS_ACCESS_KEY_ID, - secretAccessKey: AWS_SECRET_ACCESS_KEY, - region: AWS_REGION, - endpoint: AWS_S3_ACCELERATE_URL - ? AWS_S3_ACCELERATE_URL - : AWS_S3_UPLOAD_BUCKET_URL.includes(AWS_S3_UPLOAD_BUCKET_NAME) - ? undefined - : new AWS.Endpoint(AWS_S3_UPLOAD_BUCKET_URL), - signatureVersion: "v4", -}); - -const createPresignedPost: ( - params: S3.PresignedPost.Params -) => Promise = util - .promisify(s3.createPresignedPost) - .bind(s3); - -export const getPresignedPost = ( - key: string, - acl: string, - maxUploadSize: number, - contentType = "image" -) => { - const params = { - Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, - Conditions: compact([ - ["content-length-range", 0, maxUploadSize], - ["starts-with", "$Content-Type", contentType], - ["starts-with", "$Cache-Control", ""], - ]), - Fields: { - "Content-Disposition": "attachment", - key, - acl, - }, - Expires: 3600, - }; - - return createPresignedPost(params); -}; - -export const publicS3Endpoint = (isServerUpload?: boolean) => { - if (AWS_S3_ACCELERATE_URL) { - return AWS_S3_ACCELERATE_URL; - } - - // lose trailing slash if there is one and convert fake-s3 url to localhost - // for access outside of docker containers in local development - const isDocker = AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/); - - const host = AWS_S3_UPLOAD_BUCKET_URL.replace("s3:", "localhost:").replace( - /\/$/, - "" - ); - - // support old path-style S3 uploads and new virtual host uploads by checking - // for the bucket name in the endpoint url before appending. - const isVirtualHost = host.includes(AWS_S3_UPLOAD_BUCKET_NAME); - - if (isVirtualHost) { - return host; - } - - return `${host}/${ - isServerUpload && isDocker ? 
"s3/" : "" - }${AWS_S3_UPLOAD_BUCKET_NAME}`; -}; - -export const uploadToS3 = async ({ - body, - contentLength, - contentType, - key, - acl, -}: { - body: S3.Body; - contentLength: number; - contentType: string; - key: string; - acl: string; -}) => { - await s3 - .putObject({ - ACL: acl, - Bucket: AWS_S3_UPLOAD_BUCKET_NAME, - Key: key, - ContentType: contentType, - ContentLength: contentLength, - ContentDisposition: "attachment", - Body: body, - }) - .promise(); - const endpoint = publicS3Endpoint(true); - return `${endpoint}/${key}`; -}; - -export const uploadToS3FromUrl = async ( - url: string, - key: string, - acl: string -) => { - const endpoint = publicS3Endpoint(true); - if (url.startsWith("/api") || url.startsWith(endpoint)) { - return; - } - - try { - const res = await fetch(url, { - agent: useAgent(url), - }); - const buffer = await res.buffer(); - await s3 - .putObject({ - ACL: acl, - Bucket: AWS_S3_UPLOAD_BUCKET_NAME, - Key: key, - ContentType: res.headers["content-type"], - ContentLength: res.headers["content-length"], - ContentDisposition: "attachment", - Body: buffer, - }) - .promise(); - return `${endpoint}/${key}`; - } catch (err) { - Logger.error("Error uploading to S3 from URL", err, { - url, - key, - acl, - }); - return; - } -}; - -export const deleteFromS3 = (key: string) => - s3 - .deleteObject({ - Bucket: AWS_S3_UPLOAD_BUCKET_NAME, - Key: key, - }) - .promise(); - -export const getSignedUrl = async (key: string, expiresIn = 60) => { - const isDocker = AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/); - const params = { - Bucket: AWS_S3_UPLOAD_BUCKET_NAME, - Key: key, - Expires: expiresIn, - ResponseContentDisposition: "attachment", - }; - - const url = isDocker - ? `${publicS3Endpoint()}/${key}` - : await s3.getSignedUrlPromise("getObject", params); - - if (AWS_S3_ACCELERATE_URL) { - return url.replace(AWS_S3_UPLOAD_BUCKET_URL, AWS_S3_ACCELERATE_URL); - } - - return url; -}; - -// function assumes that acl is private -export const getAWSKeyForFileOp = (teamId: string, name: string) => { - const bucket = "uploads"; - return `${bucket}/${teamId}/${uuidv4()}/${name}-export.zip`; -}; - -export const getFileStream = (key: string) => { - try { - return s3 - .getObject({ - Bucket: AWS_S3_UPLOAD_BUCKET_NAME, - Key: key, - }) - .createReadStream(); - } catch (err) { - Logger.error("Error getting file stream from S3 ", err, { - key, - }); - } - - return null; -}; - -export const getFileBuffer = async (key: string) => { - const response = await s3 - .getObject({ - Bucket: AWS_S3_UPLOAD_BUCKET_NAME, - Key: key, - }) - .promise(); - - if (response.Body) { - return response.Body as Blob; - } - - throw new Error("Error getting file buffer from S3"); -}; diff --git a/server/utils/startup.ts b/server/utils/startup.ts index 1dacb54f4..6ec03f9f2 100644 --- a/server/utils/startup.ts +++ b/server/utils/startup.ts @@ -1,10 +1,10 @@ import chalk from "chalk"; import isEmpty from "lodash/isEmpty"; -import { migrations } from "@server/database/sequelize"; import env from "@server/env"; import Logger from "@server/logging/Logger"; import AuthenticationProvider from "@server/models/AuthenticationProvider"; import Team from "@server/models/Team"; +import { migrations } from "@server/storage/database"; export async function checkPendingMigrations() { try { diff --git a/server/utils/updates.ts b/server/utils/updates.ts index 1ef9558f4..80922ffc7 100644 --- a/server/utils/updates.ts +++ b/server/utils/updates.ts @@ -5,7 +5,7 @@ import Collection from "@server/models/Collection"; import Document from 
"@server/models/Document"; import Team from "@server/models/Team"; import User from "@server/models/User"; -import Redis from "@server/redis"; +import Redis from "@server/storage/redis"; import packageInfo from "../../package.json"; const UPDATES_URL = "https://updates.getoutline.com";