chore: Refactor file storage (#5711)

This commit is contained in:
Tom Moor
2023-08-20 10:04:34 -04:00
committed by GitHub
parent 4354e1055e
commit 74722b80f2
68 changed files with 496 additions and 313 deletions

View File

@@ -2,7 +2,7 @@ import fetch from "fetch-with-proxy";
import env from "@server/env";
import { InternalError } from "@server/errors";
import Logger from "@server/logging/Logger";
import Redis from "@server/redis";
import Redis from "@server/storage/redis";
class Iframely {
private static apiUrl = `${env.IFRAMELY_URL}/api`;

View File

@@ -5,11 +5,11 @@ import {
Extension,
} from "@hocuspocus/server";
import * as Y from "yjs";
import { sequelize } from "@server/database/sequelize";
import Logger from "@server/logging/Logger";
import { trace } from "@server/logging/tracing";
import Document from "@server/models/Document";
import ProsemirrorHelper from "@server/models/helpers/ProsemirrorHelper";
import { sequelize } from "@server/storage/database";
import documentCollaborativeUpdater from "../commands/documentCollaborativeUpdater";
@trace()

View File

@@ -1,7 +1,7 @@
import { Transaction } from "sequelize";
import { v4 as uuidv4 } from "uuid";
import { Attachment, Event, User } from "@server/models";
import { uploadToS3 } from "@server/utils/s3";
import FileStorage from "@server/storage/files";
export default async function attachmentCreator({
id,
@@ -24,7 +24,7 @@ export default async function attachmentCreator({
}) {
const key = `uploads/${user.id}/${uuidv4()}/${name}`;
const acl = process.env.AWS_S3_ACL || "private";
const url = await uploadToS3({
const url = await FileStorage.upload({
body: buffer,
contentType: type,
contentLength: buffer.length,

View File

@@ -1,4 +1,5 @@
import { Transaction } from "sequelize";
import { v4 as uuidv4 } from "uuid";
import {
FileOperationFormat,
FileOperationType,
@@ -6,7 +7,6 @@ import {
} from "@shared/types";
import { traceFunction } from "@server/logging/tracing";
import { Collection, Event, Team, User, FileOperation } from "@server/models";
import { getAWSKeyForFileOp } from "@server/utils/s3";
type Props = {
collection?: Collection;
@@ -18,6 +18,11 @@ type Props = {
transaction: Transaction;
};
function getKeyForFileOp(teamId: string, name: string) {
const bucket = "uploads";
return `${bucket}/${teamId}/${uuidv4()}/${name}-export.zip`;
}
async function collectionExporter({
collection,
team,
@@ -28,7 +33,7 @@ async function collectionExporter({
transaction,
}: Props) {
const collectionId = collection?.id;
const key = getAWSKeyForFileOp(user.teamId, collection?.name || team.name);
const key = getKeyForFileOp(user.teamId, collection?.name || team.name);
const fileOperation = await FileOperation.create(
{
type: FileOperationType.Export,

View File

@@ -2,10 +2,10 @@ import { yDocToProsemirrorJSON } from "@getoutline/y-prosemirror";
import uniq from "lodash/uniq";
import { Node } from "prosemirror-model";
import * as Y from "yjs";
import { sequelize } from "@server/database/sequelize";
import { schema, serializer } from "@server/editor";
import Logger from "@server/logging/Logger";
import { Document, Event } from "@server/models";
import { sequelize } from "@server/storage/database";
type Props = {
/** The document ID to update */

View File

@@ -5,7 +5,7 @@ import { buildUser } from "@server/test/factories";
import { setupTestDatabase } from "@server/test/support";
import documentImporter from "./documentImporter";
jest.mock("../utils/s3");
jest.mock("@server/storage/files");
setupTestDatabase();

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import Pin from "@server/models/Pin";
import { sequelize } from "@server/storage/database";
import { buildDocument, buildCollection } from "@server/test/factories";
import { setupTestDatabase, seed } from "@server/test/support";
import documentMover from "./documentMover";

View File

@@ -1,9 +1,9 @@
import uniq from "lodash/uniq";
import { QueryTypes } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import Logger from "@server/logging/Logger";
import { Document, Attachment } from "@server/models";
import DeleteAttachmentTask from "@server/queues/tasks/DeleteAttachmentTask";
import { sequelize } from "@server/storage/database";
import parseAttachmentIds from "@server/utils/parseAttachmentIds";
export default async function documentPermanentDeleter(documents: Document[]) {

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { Event } from "@server/models";
import { sequelize } from "@server/storage/database";
import { buildDocument, buildUser } from "@server/test/factories";
import { setupTestDatabase } from "@server/test/support";
import documentUpdater from "./documentUpdater";

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { FileOperation, Event, User } from "@server/models";
import { sequelize } from "@server/storage/database";
export default async function fileOperationDeleter(
fileOperation: FileOperation,

View File

@@ -1,6 +1,6 @@
import { NotificationEventType } from "@shared/types";
import { sequelize } from "@server/database/sequelize";
import { Event } from "@server/models";
import { sequelize } from "@server/storage/database";
import {
buildUser,
buildNotification,

View File

@@ -1,9 +1,9 @@
import fractionalIndex from "fractional-index";
import { Sequelize, Op, WhereOptions } from "sequelize";
import { PinValidation } from "@shared/validations";
import { sequelize } from "@server/database/sequelize";
import { ValidationError } from "@server/errors";
import { Pin, User, Event } from "@server/models";
import { sequelize } from "@server/storage/database";
type Props = {
/** The user creating the pin */

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { Event, Pin, User } from "@server/models";
import { sequelize } from "@server/storage/database";
type Props = {
/** The user updating the pin */

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { Document, User, Event, Revision } from "@server/models";
import { sequelize } from "@server/storage/database";
export default async function revisionCreator({
document,

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { Star, Event } from "@server/models";
import { sequelize } from "@server/storage/database";
import { buildDocument, buildUser } from "@server/test/factories";
import { setupTestDatabase } from "@server/test/support";
import starCreator from "./starCreator";

View File

@@ -1,6 +1,6 @@
import { Transaction } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import { Event, Star, User } from "@server/models";
import { sequelize } from "@server/storage/database";
type Props = {
/** The user destroying the star */

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { Event, Star, User } from "@server/models";
import { sequelize } from "@server/storage/database";
type Props = {
/** The user updating the star */

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { Subscription, Event } from "@server/models";
import { sequelize } from "@server/storage/database";
import { buildDocument, buildUser } from "@server/test/factories";
import { setupTestDatabase } from "@server/test/support";
import subscriptionCreator from "./subscriptionCreator";

View File

@@ -1,6 +1,6 @@
import { Transaction } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import { Subscription, Event, User, Document } from "@server/models";
import { sequelize } from "@server/storage/database";
import { DocumentEvent, RevisionEvent } from "@server/types";
type Props = {

View File

@@ -1,5 +1,5 @@
import { sequelize } from "@server/database/sequelize";
import { Subscription, Event } from "@server/models";
import { sequelize } from "@server/storage/database";
import {
buildDocument,
buildSubscription,

View File

@@ -1,5 +1,4 @@
import { Transaction } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import Logger from "@server/logging/Logger";
import { traceFunction } from "@server/logging/tracing";
import {
@@ -19,6 +18,7 @@ import {
SearchQuery,
Share,
} from "@server/models";
import { sequelize } from "@server/storage/database";
async function teamPermanentDeleter(team: Team) {
if (!team.deletedAt) {

View File

@@ -1,5 +1,4 @@
import teamCreator from "@server/commands/teamCreator";
import { sequelize } from "@server/database/sequelize";
import env from "@server/env";
import {
DomainNotAllowedError,
@@ -8,6 +7,7 @@ import {
} from "@server/errors";
import { traceFunction } from "@server/logging/tracing";
import { Team, AuthenticationProvider } from "@server/models";
import { sequelize } from "@server/storage/database";
type TeamProvisionerResult = {
team: Team;

View File

@@ -1,8 +1,8 @@
import { sequelize } from "@server/database/sequelize";
import { ValidationError } from "@server/errors";
import { Event, User } from "@server/models";
import type { UserRole } from "@server/models/User";
import CleanupDemotedUserTask from "@server/queues/tasks/CleanupDemotedUserTask";
import { sequelize } from "@server/storage/database";
type Props = {
user: User;

View File

@@ -1,6 +1,6 @@
import { Op } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import { Event, User } from "@server/models";
import { sequelize } from "@server/storage/database";
import { ValidationError } from "../errors";
export default async function userDestroyer({

View File

@@ -1,4 +1,3 @@
import { sequelize } from "@server/database/sequelize";
import InviteAcceptedEmail from "@server/emails/templates/InviteAcceptedEmail";
import {
DomainNotAllowedError,
@@ -6,6 +5,7 @@ import {
InviteRequiredError,
} from "@server/errors";
import { Event, Team, User, UserAuthentication } from "@server/models";
import { sequelize } from "@server/storage/database";
type UserProvisionerResult = {
user: User;

View File

@@ -1,7 +1,7 @@
import { Transaction } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import { User, Event, GroupUser } from "@server/models";
import CleanupDemotedUserTask from "@server/queues/tasks/CleanupDemotedUserTask";
import { sequelize } from "@server/storage/database";
import { ValidationError } from "../errors";
type Props = {

View File

@@ -1,6 +1,6 @@
import { Transaction } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import { User, Event } from "@server/models";
import { sequelize } from "@server/storage/database";
import { ValidationError } from "../errors";
type Props = {

View File

@@ -568,10 +568,35 @@ export class Environment {
public AWS_S3_UPLOAD_MAX_SIZE =
this.toOptionalNumber(process.env.AWS_S3_UPLOAD_MAX_SIZE) ?? 100000000;
/**
* Access key ID for AWS S3.
*/
@IsOptional()
public AWS_ACCESS_KEY_ID = this.toOptionalString(
process.env.AWS_ACCESS_KEY_ID
);
/**
* Secret key for AWS S3.
*/
@IsOptional()
@CannotUseWithout("AWS_ACCESS_KEY_ID")
public AWS_SECRET_ACCESS_KEY = this.toOptionalString(
process.env.AWS_SECRET_ACCESS_KEY
);
/**
* The name of the AWS S3 region to use.
*/
@IsOptional()
@CannotUseWithout("AWS_ACCESS_KEY_ID")
public AWS_REGION = this.toOptionalString(process.env.AWS_REGION);
/**
* Optional AWS S3 accelerate URL to use for uploads and downloads.
*/
@IsOptional()
@CannotUseWithout("AWS_ACCESS_KEY_ID")
public AWS_S3_ACCELERATE_URL = this.toOptionalString(
process.env.AWS_S3_ACCELERATE_URL
);
@@ -580,8 +605,26 @@ export class Environment {
* Optional AWS S3 endpoint URL for file attachments.
*/
@IsOptional()
public AWS_S3_UPLOAD_BUCKET_URL = this.toOptionalString(
process.env.AWS_S3_UPLOAD_BUCKET_URL
@CannotUseWithout("AWS_ACCESS_KEY_ID")
public AWS_S3_UPLOAD_BUCKET_URL = process.env.AWS_S3_UPLOAD_BUCKET_URL ?? "";
/**
* The bucket name to store file attachments in.
*/
@IsOptional()
@CannotUseWithout("AWS_ACCESS_KEY_ID")
public AWS_S3_UPLOAD_BUCKET_NAME = this.toOptionalString(
process.env.AWS_S3_UPLOAD_BUCKET_NAME
);
/**
* Whether to force path style URLs for S3 objects, this is required for some
* S3-compatible storage providers.
*/
@IsOptional()
@CannotUseWithout("AWS_ACCESS_KEY_ID")
public AWS_S3_FORCE_PATH_STYLE = this.toBoolean(
process.env.AWS_S3_FORCE_PATH_STYLE ?? "true"
);
/**

View File

@@ -23,8 +23,8 @@ import { checkEnv, checkPendingMigrations } from "./utils/startup";
import { checkUpdates } from "./utils/updates";
import onerror from "./onerror";
import ShutdownHelper, { ShutdownOrder } from "./utils/ShutdownHelper";
import { sequelize } from "./database/sequelize";
import RedisAdapter from "./redis";
import { sequelize } from "./storage/database";
import RedisAdapter from "./storage/redis";
import Metrics from "./logging/Metrics";
// The default is to run all services to make development and OSS installations

View File

@@ -4,7 +4,7 @@ import env from "@server/env";
import { RateLimitExceededError } from "@server/errors";
import Logger from "@server/logging/Logger";
import Metrics from "@server/logging/Metrics";
import Redis from "@server/redis";
import Redis from "@server/storage/redis";
import RateLimiter from "@server/utils/RateLimiter";
/**

View File

@@ -1,6 +1,6 @@
import { Next } from "koa";
import { Transaction } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import { sequelize } from "@server/storage/database";
import { AppContext } from "@server/types";
/**

View File

@@ -10,13 +10,7 @@ import {
DataType,
IsNumeric,
} from "sequelize-typescript";
import {
publicS3Endpoint,
deleteFromS3,
getFileStream,
getSignedUrl,
getFileBuffer,
} from "@server/utils/s3";
import FileStorage from "@server/storage/files";
import Document from "./Document";
import Team from "./Team";
import User from "./User";
@@ -76,14 +70,14 @@ class Attachment extends IdModel {
* Get the contents of this attachment as a readable stream.
*/
get stream() {
return getFileStream(this.key);
return FileStorage.getFileStream(this.key);
}
/**
* Get the contents of this attachment as a buffer.
*/
get buffer() {
return getFileBuffer(this.key);
return FileStorage.getFileBuffer(this.key);
}
/**
@@ -105,21 +99,21 @@ class Attachment extends IdModel {
* a signed URL must be used.
*/
get canonicalUrl() {
return encodeURI(`${publicS3Endpoint()}/${this.key}`);
return encodeURI(`${FileStorage.getPublicEndpoint()}/${this.key}`);
}
/**
* Get a signed URL with the default expiry to download the attachment from storage.
*/
get signedUrl() {
return getSignedUrl(this.key);
return FileStorage.getSignedUrl(this.key);
}
// hooks
@BeforeDestroy
static async deleteAttachmentFromS3(model: Attachment) {
await deleteFromS3(model.key);
await FileStorage.deleteFile(model.key);
}
// associations

View File

@@ -13,7 +13,7 @@ import {
FileOperationState,
FileOperationType,
} from "@shared/types";
import { deleteFromS3, getFileStream } from "@server/utils/s3";
import FileStorage from "@server/storage/files";
import Collection from "./Collection";
import Team from "./Team";
import User from "./User";
@@ -67,7 +67,7 @@ class FileOperation extends IdModel {
expire = async function () {
this.state = FileOperationState.Expired;
try {
await deleteFromS3(this.key);
await FileStorage.deleteFile(this.key);
} catch (err) {
if (err.retryable) {
throw err;
@@ -80,14 +80,14 @@ class FileOperation extends IdModel {
* The file operation contents as a readable stream.
*/
get stream() {
return getFileStream(this.key);
return FileStorage.getFileStream(this.key);
}
// hooks
@BeforeDestroy
static async deleteFileFromS3(model: FileOperation) {
await deleteFromS3(model.key);
await FileStorage.deleteFile(model.key);
}
// associations

View File

@@ -1,6 +1,6 @@
import isNil from "lodash/isNil";
import vaults from "@server/database/vaults";
import Logger from "@server/logging/Logger";
import vaults from "@server/storage/vaults";
const key = "sequelize:vault";

View File

@@ -19,9 +19,9 @@ import { trace } from "@server/logging/tracing";
import type Document from "@server/models/Document";
import type Revision from "@server/models/Revision";
import User from "@server/models/User";
import FileStorage from "@server/storage/files";
import diff from "@server/utils/diff";
import parseAttachmentIds from "@server/utils/parseAttachmentIds";
import { getSignedUrl } from "@server/utils/s3";
import Attachment from "../Attachment";
import ProsemirrorHelper from "./ProsemirrorHelper";
@@ -324,7 +324,10 @@ export default class DocumentHelper {
});
if (attachment) {
const signedUrl = await getSignedUrl(attachment.key, expiresIn);
const signedUrl = await FileStorage.getSignedUrl(
attachment.key,
expiresIn
);
text = text.replace(
new RegExp(escapeRegExp(attachment.redirectUrl), "g"),
signedUrl

View File

@@ -5,12 +5,12 @@ import map from "lodash/map";
import queryParser from "pg-tsquery";
import { Op, QueryTypes, WhereOptions } from "sequelize";
import { DateFilter } from "@shared/types";
import { sequelize } from "@server/database/sequelize";
import Collection from "@server/models/Collection";
import Document from "@server/models/Document";
import Share from "@server/models/Share";
import Team from "@server/models/Team";
import User from "@server/models/User";
import { sequelize } from "@server/storage/database";
type SearchResponse = {
results: {

View File

@@ -1,6 +1,6 @@
import { sequelize } from "@server/database/sequelize";
import Logger from "@server/logging/Logger";
import { WebhookSubscription, ApiKey, User } from "@server/models";
import { sequelize } from "@server/storage/database";
import BaseTask from "./BaseTask";
type Props = {

View File

@@ -1,9 +1,9 @@
import { NotificationEventType } from "@shared/types";
import subscriptionCreator from "@server/commands/subscriptionCreator";
import { sequelize } from "@server/database/sequelize";
import { Comment, Document, Notification, User } from "@server/models";
import NotificationHelper from "@server/models/helpers/NotificationHelper";
import ProsemirrorHelper from "@server/models/helpers/ProsemirrorHelper";
import { sequelize } from "@server/storage/database";
import { CommentEvent } from "@server/types";
import BaseTask, { TaskPriority } from "./BaseTask";

View File

@@ -1,7 +1,7 @@
import { Op } from "sequelize";
import documentMover from "@server/commands/documentMover";
import { sequelize } from "@server/database/sequelize";
import { Collection, Document, User } from "@server/models";
import { sequelize } from "@server/storage/database";
import BaseTask from "./BaseTask";
type Props = {

View File

@@ -6,7 +6,7 @@ import ExportSuccessEmail from "@server/emails/templates/ExportSuccessEmail";
import Logger from "@server/logging/Logger";
import { Collection, Event, FileOperation, Team, User } from "@server/models";
import fileOperationPresenter from "@server/presenters/fileOperation";
import { uploadToS3 } from "@server/utils/s3";
import FileStorage from "@server/storage/files";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {
@@ -60,7 +60,7 @@ export default abstract class ExportTask extends BaseTask<Props> {
});
const stat = await fs.promises.stat(filePath);
const url = await uploadToS3({
const url = await FileStorage.upload({
body: fs.createReadStream(filePath),
contentLength: stat.size,
contentType: "application/zip",

View File

@@ -1,4 +1,3 @@
import { S3 } from "aws-sdk";
import truncate from "lodash/truncate";
import {
CollectionPermission,
@@ -8,7 +7,6 @@ import {
import { CollectionValidation } from "@shared/validations";
import attachmentCreator from "@server/commands/attachmentCreator";
import documentCreator from "@server/commands/documentCreator";
import { sequelize } from "@server/database/sequelize";
import { serializer } from "@server/editor";
import { InternalError, ValidationError } from "@server/errors";
import Logger from "@server/logging/Logger";
@@ -20,6 +18,7 @@ import {
FileOperation,
Attachment,
} from "@server/models";
import { sequelize } from "@server/storage/database";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {
@@ -207,7 +206,7 @@ export default abstract class ImportTask extends BaseTask<Props> {
* @returns A promise that resolves to the structured data
*/
protected abstract parseData(
data: S3.Body,
data: Buffer | NodeJS.ReadableStream,
fileOperation: FileOperation
): Promise<StructuredImportData>;

View File

@@ -1,9 +1,9 @@
import { subDays } from "date-fns";
import { Op } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import InviteReminderEmail from "@server/emails/templates/InviteReminderEmail";
import { User } from "@server/models";
import { UserFlag } from "@server/models/User";
import { sequelize } from "@server/storage/database";
import BaseTask, { TaskPriority, TaskSchedule } from "./BaseTask";
type Props = undefined;

View File

@@ -1,6 +1,6 @@
import { v4 as uuidv4 } from "uuid";
import { Team } from "@server/models";
import { uploadToS3FromUrl } from "@server/utils/s3";
import FileStorage from "@server/storage/files";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {
@@ -20,7 +20,7 @@ export default class UploadTeamAvatarTask extends BaseTask<Props> {
rejectOnEmpty: true,
});
const avatarUrl = await uploadToS3FromUrl(
const avatarUrl = await FileStorage.uploadFromUrl(
props.avatarUrl,
`avatars/${team.id}/${uuidv4()}`,
"public-read"

View File

@@ -1,6 +1,6 @@
import { v4 as uuidv4 } from "uuid";
import { User } from "@server/models";
import { uploadToS3FromUrl } from "@server/utils/s3";
import FileStorage from "@server/storage/files";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {
@@ -20,7 +20,7 @@ export default class UploadUserAvatarTask extends BaseTask<Props> {
rejectOnEmpty: true,
});
const avatarUrl = await uploadToS3FromUrl(
const avatarUrl = await FileStorage.uploadFromUrl(
props.avatarUrl,
`avatars/${user.id}/${uuidv4()}`,
"public-read"

View File

@@ -1,6 +1,6 @@
import { sequelize } from "@server/database/sequelize";
import Logger from "@server/logging/Logger";
import { User, UserAuthentication } from "@server/models";
import { sequelize } from "@server/storage/database";
import BaseTask, { TaskPriority } from "./BaseTask";
type Props = {

View File

@@ -11,7 +11,7 @@ import {
} from "@server/test/factories";
import { getTestServer } from "@server/test/support";
jest.mock("@server/utils/s3");
jest.mock("@server/storage/files");
const server = getTestServer();

View File

@@ -12,9 +12,9 @@ import { Attachment, Document, Event } from "@server/models";
import AttachmentHelper from "@server/models/helpers/AttachmentHelper";
import { authorize } from "@server/policies";
import { presentAttachment } from "@server/presenters";
import FileStorage from "@server/storage/files";
import { APIContext } from "@server/types";
import { RateLimiterStrategy } from "@server/utils/RateLimiter";
import { getPresignedPost, publicS3Endpoint } from "@server/utils/s3";
import { assertIn } from "@server/validation";
import * as T from "./schema";
@@ -90,7 +90,7 @@ router.post(
{ transaction }
);
const presignedPost = await getPresignedPost(
const presignedPost = await FileStorage.getPresignedPost(
key,
acl,
maxUploadSize,
@@ -99,7 +99,7 @@ router.post(
ctx.body = {
data: {
uploadUrl: publicS3Endpoint(),
uploadUrl: FileStorage.getPublicEndpoint(),
form: {
"Cache-Control": "max-age=31557600",
"Content-Type": contentType,

View File

@@ -12,7 +12,7 @@ import { getTestServer } from "@server/test/support";
const server = getTestServer();
jest.mock("@server/utils/s3");
jest.mock("@server/storage/files");
describe("#fileOperations.info", () => {
it("should return fileOperation", async () => {

View File

@@ -7,8 +7,8 @@ import validate from "@server/middlewares/validate";
import { FileOperation, Team } from "@server/models";
import { authorize } from "@server/policies";
import { presentFileOperation } from "@server/presenters";
import FileStorage from "@server/storage/files";
import { APIContext } from "@server/types";
import { getSignedUrl } from "@server/utils/s3";
import pagination from "../middlewares/pagination";
import * as T from "./schema";
@@ -84,7 +84,7 @@ const handleFileOperationsRedirect = async (
throw ValidationError(`${fileOperation.type} is not complete yet`);
}
const accessUrl = await getSignedUrl(fileOperation.key);
const accessUrl = await FileStorage.getSignedUrl(fileOperation.key);
ctx.redirect(accessUrl);
};

View File

@@ -1,7 +1,7 @@
import "./bootstrap";
import { QueryTypes } from "sequelize";
import { sequelize } from "@server/database/sequelize";
import { User } from "@server/models";
import { sequelize } from "@server/storage/database";
const limit = 100;
let page = parseInt(process.argv[2], 10);

View File

@@ -5,6 +5,6 @@ if (process.env.NODE_ENV !== "test") {
});
}
require("../database/sequelize");
require("../storage/database");
export {};

View File

@@ -1,8 +1,8 @@
import "./bootstrap";
import teamCreator from "@server/commands/teamCreator";
import { sequelize } from "@server/database/sequelize";
import env from "@server/env";
import { Team, User } from "@server/models";
import { sequelize } from "@server/storage/database";
const email = process.argv[2];

View File

@@ -11,11 +11,11 @@ import * as Tracing from "@server/logging/tracer";
import { traceFunction } from "@server/logging/tracing";
import { Collection, User } from "@server/models";
import { can } from "@server/policies";
import Redis from "@server/storage/redis";
import ShutdownHelper, { ShutdownOrder } from "@server/utils/ShutdownHelper";
import { getUserForJWT } from "@server/utils/jwt";
import { websocketQueue } from "../queues";
import WebsocketsProcessor from "../queues/processors/WebsocketsProcessor";
import Redis from "../redis";
type SocketWithAuth = IO.Socket & {
client: IO.Socket["client"] & {

View File

@@ -0,0 +1,99 @@
import { Readable } from "stream";
import { PresignedPost } from "aws-sdk/clients/s3";
export default abstract class BaseStorage {
  /**
   * Store a file with the storage provider.
   *
   * @param body The file body
   * @param contentLength The content length of the file
   * @param contentType The content type of the file
   * @param key The path to store the file at
   * @param acl The ACL to use
   * @returns The URL of the file
   */
  public abstract upload({
    body,
    contentLength,
    contentType,
    key,
    acl,
  }: {
    body: Buffer | Uint8Array | Blob | string | Readable;
    contentLength: number;
    contentType: string;
    key: string;
    acl: string;
  }): Promise<string | undefined>;

  /**
   * Fetch a remote URL and store its contents with the storage provider.
   *
   * @param url The URL to upload from
   * @param key The path to store the file at
   * @param acl The ACL to use
   * @returns The URL of the file
   */
  public abstract uploadFromUrl(
    url: string,
    key: string,
    acl: string
  ): Promise<string | undefined>;

  /**
   * Returns a presigned post for uploading files to the storage provider.
   *
   * @param key The path to store the file at
   * @param acl The ACL to use
   * @param maxUploadSize The maximum upload size in bytes
   * @param contentType The content type of the file
   * @returns The presigned post object to use on the client (TODO: Abstract away from S3)
   */
  public abstract getPresignedPost(
    key: string,
    acl: string,
    maxUploadSize: number,
    contentType: string
  ): Promise<PresignedPost>;

  /**
   * Returns the public endpoint for the storage provider.
   *
   * @param isServerUpload Whether the upload is happening on the server or not
   * @returns The public endpoint as a string
   */
  public abstract getPublicEndpoint(isServerUpload?: boolean): string;

  /**
   * Returns a signed URL for a file from the storage provider.
   *
   * @param key The path to the file
   * @param expiresIn An optional number of seconds until the URL expires
   */
  public abstract getSignedUrl(
    key: string,
    expiresIn?: number
  ): Promise<string>;

  /**
   * Returns a stream for reading a file from the storage provider.
   *
   * @param key The path to the file
   */
  public abstract getFileStream(key: string): NodeJS.ReadableStream | null;

  /**
   * Returns the contents of a file from the storage provider.
   *
   * @param key The path to the file
   */
  public abstract getFileBuffer(key: string): Promise<Blob>;

  /**
   * Delete a file from the storage provider.
   *
   * @param key The path to the file
   * @returns A promise that resolves when the file is deleted
   */
  public abstract deleteFile(key: string): Promise<void>;
}

View File

@@ -0,0 +1,254 @@
import util from "util";
import AWS, { S3 } from "aws-sdk";
import fetch from "fetch-with-proxy";
import invariant from "invariant";
import compact from "lodash/compact";
import { useAgent } from "request-filtering-agent";
import env from "@server/env";
import Logger from "@server/logging/Logger";
import BaseStorage from "./BaseStorage";
export default class S3Storage extends BaseStorage {
constructor() {
super();
this.client = new AWS.S3({
s3BucketEndpoint: env.AWS_S3_ACCELERATE_URL ? true : undefined,
s3ForcePathStyle: env.AWS_S3_FORCE_PATH_STYLE,
accessKeyId: env.AWS_ACCESS_KEY_ID,
secretAccessKey: env.AWS_SECRET_ACCESS_KEY,
region: env.AWS_REGION,
endpoint: this.getEndpoint(),
signatureVersion: "v4",
});
}
public async getPresignedPost(
key: string,
acl: string,
maxUploadSize: number,
contentType = "image"
) {
const params = {
Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME,
Conditions: compact([
["content-length-range", 0, maxUploadSize],
["starts-with", "$Content-Type", contentType],
["starts-with", "$Cache-Control", ""],
]),
Fields: {
"Content-Disposition": "attachment",
key,
acl,
},
Expires: 3600,
};
return util.promisify(this.client.createPresignedPost).bind(this.client)(
params
);
}
public getPublicEndpoint(isServerUpload?: boolean) {
if (env.AWS_S3_ACCELERATE_URL) {
return env.AWS_S3_ACCELERATE_URL;
}
invariant(
env.AWS_S3_UPLOAD_BUCKET_NAME,
"AWS_S3_UPLOAD_BUCKET_NAME is required"
);
// lose trailing slash if there is one and convert fake-s3 url to localhost
// for access outside of docker containers in local development
const isDocker = env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);
const host = env.AWS_S3_UPLOAD_BUCKET_URL.replace(
"s3:",
"localhost:"
).replace(/\/$/, "");
// support old path-style S3 uploads and new virtual host uploads by checking
// for the bucket name in the endpoint url before appending.
const isVirtualHost = host.includes(env.AWS_S3_UPLOAD_BUCKET_NAME);
if (isVirtualHost) {
return host;
}
return `${host}/${isServerUpload && isDocker ? "s3/" : ""}${
env.AWS_S3_UPLOAD_BUCKET_NAME
}`;
}
public upload = async ({
body,
contentLength,
contentType,
key,
acl,
}: {
body: S3.Body;
contentLength: number;
contentType: string;
key: string;
acl: string;
}) => {
invariant(
env.AWS_S3_UPLOAD_BUCKET_NAME,
"AWS_S3_UPLOAD_BUCKET_NAME is required"
);
await this.client
.putObject({
ACL: acl,
Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
ContentType: contentType,
ContentLength: contentLength,
ContentDisposition: "attachment",
Body: body,
})
.promise();
const endpoint = this.getPublicEndpoint(true);
return `${endpoint}/${key}`;
};
public async uploadFromUrl(url: string, key: string, acl: string) {
invariant(
env.AWS_S3_UPLOAD_BUCKET_NAME,
"AWS_S3_UPLOAD_BUCKET_NAME is required"
);
const endpoint = this.getPublicEndpoint(true);
if (url.startsWith("/api") || url.startsWith(endpoint)) {
return;
}
try {
const res = await fetch(url, {
agent: useAgent(url),
});
const buffer = await res.buffer();
await this.client
.putObject({
ACL: acl,
Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
ContentType: res.headers["content-type"],
ContentLength: res.headers["content-length"],
ContentDisposition: "attachment",
Body: buffer,
})
.promise();
return `${endpoint}/${key}`;
} catch (err) {
Logger.error("Error uploading to S3 from URL", err, {
url,
key,
acl,
});
return;
}
}
public async deleteFile(key: string) {
invariant(
env.AWS_S3_UPLOAD_BUCKET_NAME,
"AWS_S3_UPLOAD_BUCKET_NAME is required"
);
await this.client
.deleteObject({
Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
})
.promise();
}
/**
 * Generate a time-limited URL for downloading the object stored under the
 * given key.
 *
 * @param key The key of the file to access
 * @param expiresIn Number of seconds until the URL expires (default 60)
 * @returns A URL from which the file can be downloaded
 */
public getSignedUrl = async (key: string, expiresIn = 60) => {
  // In local development with the dockerized fake-s3 container a signed
  // URL would point at a host unreachable from the browser, so serve the
  // public endpoint instead.
  const isDocker = env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);

  let url: string;
  if (isDocker) {
    url = `${this.getPublicEndpoint()}/${key}`;
  } else {
    url = await this.client.getSignedUrlPromise("getObject", {
      Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME,
      Key: key,
      Expires: expiresIn,
      ResponseContentDisposition: "attachment",
    });
  }

  return env.AWS_S3_ACCELERATE_URL
    ? url.replace(env.AWS_S3_UPLOAD_BUCKET_URL, env.AWS_S3_ACCELERATE_URL)
    : url;
};
/**
 * Open a readable stream for the object stored under the given key.
 *
 * @param key The key of the file to read
 * @returns A readable stream, or null if creating the stream failed.
 *   NOTE(review): errors raised asynchronously on the stream itself are
 *   not caught here — callers should attach their own error handlers.
 */
public getFileStream(key: string) {
  invariant(
    env.AWS_S3_UPLOAD_BUCKET_NAME,
    "AWS_S3_UPLOAD_BUCKET_NAME is required"
  );

  try {
    const params = {
      Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME,
      Key: key,
    };
    return this.client.getObject(params).createReadStream();
  } catch (err) {
    Logger.error("Error getting file stream from S3 ", err, {
      key,
    });
  }

  return null;
}
/**
 * Read the full content of the object stored under the given key into
 * memory.
 *
 * @param key The key of the file to read
 * @returns The file content
 * @throws Error when the object exists but has no body
 */
public async getFileBuffer(key: string) {
  invariant(
    env.AWS_S3_UPLOAD_BUCKET_NAME,
    "AWS_S3_UPLOAD_BUCKET_NAME is required"
  );

  const res = await this.client
    .getObject({
      Bucket: env.AWS_S3_UPLOAD_BUCKET_NAME,
      Key: key,
    })
    .promise();

  if (!res.Body) {
    throw new Error("Error getting file buffer from S3");
  }
  return res.Body as Blob;
}
private client: AWS.S3;
/**
 * Determine the endpoint to configure the S3 client with.
 *
 * @returns The accelerate URL when configured, undefined when the SDK
 *   should derive the endpoint from the bucket name (virtual-host style),
 *   or an explicit Endpoint built from the bucket URL otherwise.
 */
private getEndpoint() {
  if (env.AWS_S3_ACCELERATE_URL) {
    return env.AWS_S3_ACCELERATE_URL;
  }

  // Virtual-host style buckets embed the bucket name in the hostname; in
  // that case the SDK must not be handed an explicit endpoint.
  const bucket = env.AWS_S3_UPLOAD_BUCKET_NAME;
  if (bucket && env.AWS_S3_FORCE_PATH_STYLE === false) {
    const { hostname } = new URL(env.AWS_S3_UPLOAD_BUCKET_URL);
    if (hostname.startsWith(`${bucket}.`)) {
      return undefined;
    }
  }

  return new AWS.Endpoint(env.AWS_S3_UPLOAD_BUCKET_URL);
}
}

View File

@@ -0,0 +1,9 @@
export default {
upload: jest.fn().mockReturnValue("/endpoint/key"),
getPublicEndpoint: jest.fn().mockReturnValue("http://mock"),
getSignedUrl: jest.fn().mockReturnValue("http://s3mock"),
getPresignedPost: jest.fn().mockReturnValue({}),
};

View File

@@ -0,0 +1,3 @@
import S3Storage from "./S3Storage";
export default new S3Storage();

View File

@@ -1,8 +1,8 @@
import Redis from "@server/redis";
import Redis from "@server/storage/redis";
// NOTE: this require must come after the ENV var override
// so that sequelize uses the test config variables
require("@server/database/sequelize");
require("@server/storage/database");
jest.mock("bull");

View File

@@ -1,10 +1,10 @@
import TestServer from "fetch-test-server";
import { v4 as uuidv4 } from "uuid";
import { CollectionPermission } from "@shared/types";
import { sequelize } from "@server/database/sequelize";
import { User, Document, Collection, Team } from "@server/models";
import onerror from "@server/onerror";
import webService from "@server/services/web";
import { sequelize } from "@server/storage/database";
export const seed = async () =>
sequelize.transaction(async (transaction) => {

View File

@@ -4,7 +4,7 @@ import {
RateLimiterMemory,
} from "rate-limiter-flexible";
import env from "@server/env";
import Redis from "@server/redis";
import Redis from "@server/storage/redis";
export default class RateLimiter {
constructor() {

View File

@@ -1,9 +0,0 @@
export const uploadToS3 = jest.fn().mockReturnValue("/endpoint/key");
export const publicS3Endpoint = jest.fn().mockReturnValue("http://mock");
export const getSignedUrl = jest.fn().mockReturnValue("http://s3mock");
export const getSignedUrlPromise = jest.fn().mockResolvedValue("http://s3mock");
export const getPresignedPost = jest.fn().mockReturnValue({});

View File

@@ -3,7 +3,7 @@ import snakeCase from "lodash/snakeCase";
import { Second } from "@shared/utils/time";
import env from "@server/env";
import Metrics from "@server/logging/Metrics";
import Redis from "../redis";
import Redis from "@server/storage/redis";
import ShutdownHelper, { ShutdownOrder } from "./ShutdownHelper";
export function createQueue(

View File

@@ -1,217 +0,0 @@
import util from "util";
import AWS, { S3 } from "aws-sdk";
import fetch from "fetch-with-proxy";
import compact from "lodash/compact";
import { useAgent } from "request-filtering-agent";
import { v4 as uuidv4 } from "uuid";
import Logger from "@server/logging/Logger";
const AWS_S3_ACCELERATE_URL = process.env.AWS_S3_ACCELERATE_URL;
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_S3_UPLOAD_BUCKET_URL = process.env.AWS_S3_UPLOAD_BUCKET_URL || "";
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_REGION = process.env.AWS_REGION || "";
const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME || "";
const AWS_S3_FORCE_PATH_STYLE = process.env.AWS_S3_FORCE_PATH_STYLE !== "false";
const s3 = new AWS.S3({
s3BucketEndpoint: AWS_S3_ACCELERATE_URL ? true : undefined,
s3ForcePathStyle: AWS_S3_FORCE_PATH_STYLE,
accessKeyId: AWS_ACCESS_KEY_ID,
secretAccessKey: AWS_SECRET_ACCESS_KEY,
region: AWS_REGION,
endpoint: AWS_S3_ACCELERATE_URL
? AWS_S3_ACCELERATE_URL
: AWS_S3_UPLOAD_BUCKET_URL.includes(AWS_S3_UPLOAD_BUCKET_NAME)
? undefined
: new AWS.Endpoint(AWS_S3_UPLOAD_BUCKET_URL),
signatureVersion: "v4",
});
const createPresignedPost: (
params: S3.PresignedPost.Params
) => Promise<S3.PresignedPost> = util
.promisify(s3.createPresignedPost)
.bind(s3);
export const getPresignedPost = (
key: string,
acl: string,
maxUploadSize: number,
contentType = "image"
) => {
const params = {
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Conditions: compact([
["content-length-range", 0, maxUploadSize],
["starts-with", "$Content-Type", contentType],
["starts-with", "$Cache-Control", ""],
]),
Fields: {
"Content-Disposition": "attachment",
key,
acl,
},
Expires: 3600,
};
return createPresignedPost(params);
};
export const publicS3Endpoint = (isServerUpload?: boolean) => {
if (AWS_S3_ACCELERATE_URL) {
return AWS_S3_ACCELERATE_URL;
}
// lose trailing slash if there is one and convert fake-s3 url to localhost
// for access outside of docker containers in local development
const isDocker = AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);
const host = AWS_S3_UPLOAD_BUCKET_URL.replace("s3:", "localhost:").replace(
/\/$/,
""
);
// support old path-style S3 uploads and new virtual host uploads by checking
// for the bucket name in the endpoint url before appending.
const isVirtualHost = host.includes(AWS_S3_UPLOAD_BUCKET_NAME);
if (isVirtualHost) {
return host;
}
return `${host}/${
isServerUpload && isDocker ? "s3/" : ""
}${AWS_S3_UPLOAD_BUCKET_NAME}`;
};
export const uploadToS3 = async ({
body,
contentLength,
contentType,
key,
acl,
}: {
body: S3.Body;
contentLength: number;
contentType: string;
key: string;
acl: string;
}) => {
await s3
.putObject({
ACL: acl,
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
ContentType: contentType,
ContentLength: contentLength,
ContentDisposition: "attachment",
Body: body,
})
.promise();
const endpoint = publicS3Endpoint(true);
return `${endpoint}/${key}`;
};
export const uploadToS3FromUrl = async (
url: string,
key: string,
acl: string
) => {
const endpoint = publicS3Endpoint(true);
if (url.startsWith("/api") || url.startsWith(endpoint)) {
return;
}
try {
const res = await fetch(url, {
agent: useAgent(url),
});
const buffer = await res.buffer();
await s3
.putObject({
ACL: acl,
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
ContentType: res.headers["content-type"],
ContentLength: res.headers["content-length"],
ContentDisposition: "attachment",
Body: buffer,
})
.promise();
return `${endpoint}/${key}`;
} catch (err) {
Logger.error("Error uploading to S3 from URL", err, {
url,
key,
acl,
});
return;
}
};
export const deleteFromS3 = (key: string) =>
s3
.deleteObject({
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
})
.promise();
export const getSignedUrl = async (key: string, expiresIn = 60) => {
const isDocker = AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);
const params = {
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
Expires: expiresIn,
ResponseContentDisposition: "attachment",
};
const url = isDocker
? `${publicS3Endpoint()}/${key}`
: await s3.getSignedUrlPromise("getObject", params);
if (AWS_S3_ACCELERATE_URL) {
return url.replace(AWS_S3_UPLOAD_BUCKET_URL, AWS_S3_ACCELERATE_URL);
}
return url;
};
// function assumes that acl is private
export const getAWSKeyForFileOp = (teamId: string, name: string) => {
const bucket = "uploads";
return `${bucket}/${teamId}/${uuidv4()}/${name}-export.zip`;
};
export const getFileStream = (key: string) => {
try {
return s3
.getObject({
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
})
.createReadStream();
} catch (err) {
Logger.error("Error getting file stream from S3 ", err, {
key,
});
}
return null;
};
export const getFileBuffer = async (key: string) => {
const response = await s3
.getObject({
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
})
.promise();
if (response.Body) {
return response.Body as Blob;
}
throw new Error("Error getting file buffer from S3");
};

View File

@@ -1,10 +1,10 @@
import chalk from "chalk";
import isEmpty from "lodash/isEmpty";
import { migrations } from "@server/database/sequelize";
import env from "@server/env";
import Logger from "@server/logging/Logger";
import AuthenticationProvider from "@server/models/AuthenticationProvider";
import Team from "@server/models/Team";
import { migrations } from "@server/storage/database";
export async function checkPendingMigrations() {
try {

View File

@@ -5,7 +5,7 @@ import Collection from "@server/models/Collection";
import Document from "@server/models/Document";
import Team from "@server/models/Team";
import User from "@server/models/User";
import Redis from "@server/redis";
import Redis from "@server/storage/redis";
import packageInfo from "../../package.json";
const UPDATES_URL = "https://updates.getoutline.com";