refactor: Upload file to storage, then pass attachmentId to collections.import

This avoids routing large file uploads directly through the application server and allows the file to be fetched asynchronously by a worker process.
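For context, the end-to-end flow this enables looks roughly like the sketch below. It is a minimal illustration only: the attachments.create endpoint name, its response shape (uploadUrl, form, attachment), and any collections.import parameters besides attachmentId are assumptions, not part of this commit.

// Minimal client-side sketch (assumed endpoint names and response shape;
// only "pass attachmentId to collections.import" comes from this commit).
async function importCollection(file: File) {
  // 1. Ask the API for presigned upload credentials. The S3 policy is built
  //    by makePolicy, which now accepts the file's content type instead of
  //    hard-coding "image".
  const res = await fetch("/api/attachments.create", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ name: file.name, contentType: file.type, size: file.size }),
  });
  const { data } = await res.json();

  // 2. POST the file straight to storage using the presigned form fields,
  //    so the large upload never passes through the application server.
  const form = new FormData();
  for (const [field, value] of Object.entries(data.form)) {
    form.append(field, value);
  }
  form.append("file", file);
  await fetch(data.uploadUrl, { method: "POST", body: form });

  // 3. Start the import with just the attachment id; a worker process can
  //    fetch the uploaded file from storage asynchronously.
  await fetch("/api/collections.import", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ attachmentId: data.attachment.id }),
  });
}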
Tom Moor
2021-02-18 22:36:07 -08:00
parent 568e271738
commit df233c95a9
12 changed files with 70 additions and 54 deletions


@@ -46,7 +46,8 @@ export const makeCredential = () => {
 export const makePolicy = (
   credential: string,
   longDate: string,
-  acl: string
+  acl: string,
+  contentType: string = "image"
 ) => {
   const tomorrow = addHours(new Date(), 24);
   const policy = {
@@ -55,7 +56,7 @@ export const makePolicy = (
       ["starts-with", "$key", ""],
       { acl },
       ["content-length-range", 0, +process.env.AWS_S3_UPLOAD_MAX_SIZE],
-      ["starts-with", "$Content-Type", "image"],
+      ["starts-with", "$Content-Type", contentType],
       ["starts-with", "$Cache-Control", ""],
       { "x-amz-algorithm": "AWS4-HMAC-SHA256" },
       { "x-amz-credential": credential },
@@ -177,7 +178,7 @@ export const getSignedImageUrl = async (key: string) => {
     : s3.getSignedUrl("getObject", params);
 };
 
-export const getImageByKey = async (key: string) => {
+export const getFileByKey = async (key: string) => {
   const params = {
     Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
     Key: key,
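On the server side, the extra makePolicy parameter makes it possible to presign uploads other than images, which is what the import flow needs. A rough usage sketch follows; the acl value, the longDate format, and the import path are assumptions, and only makeCredential and the makePolicy signature above are from this diff.

// Hypothetical caller of the updated helper (not part of this diff).
import { makeCredential, makePolicy } from "./s3"; // path assumed

const credential = makeCredential();
const longDate = "20210218T000000Z"; // AWS "long date" format, assumed

// Constrains $Content-Type in the POST policy to "application/zip" rather
// than the previous hard-coded "image" prefix.
const policy = makePolicy(credential, longDate, "private", "application/zip");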


@@ -4,7 +4,7 @@ import * as Sentry from "@sentry/node";
 import JSZip from "jszip";
 import tmp from "tmp";
 import { Attachment, Collection, Document } from "../models";
-import { getImageByKey } from "./s3";
+import { getFileByKey } from "./s3";
 
 async function addToArchive(zip, documents) {
   for (const doc of documents) {
@@ -38,7 +38,7 @@ async function addToArchive(zip, documents) {
 
 async function addImageToArchive(zip, key) {
   try {
-    const img = await getImageByKey(key);
+    const img = await getFileByKey(key);
     zip.file(key, img, { createFolders: true });
   } catch (err) {
     if (process.env.SENTRY_DSN) {
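For completeness, the worker-side counterpart implied by the commit message would look something like the sketch below. It is not part of this diff: the Attachment lookup (a Sequelize-style findByPk and a key field) is an assumption; only getFileByKey and the Attachment import appear in the changed files.

// Hypothetical worker task (assumptions noted above, not part of this diff).
import { Attachment } from "../models";
import { getFileByKey } from "./s3";

async function fetchImportFile(attachmentId: string) {
  // Resolve the storage key from the attachment record created during upload.
  const attachment = await Attachment.findByPk(attachmentId);
  if (!attachment) {
    throw new Error("Attachment not found");
  }

  // getFileByKey returns the raw object body, the same way addImageToArchive
  // above uses it when bundling images into an export zip.
  return getFileByKey(attachment.key);
}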