feat: private content (#1137)

* save images as private and serve via signed url from images.info api

* download private images to directory on export

* fix lint errors

* private s3 ACL by default, AWS.S3 client at module-level scope, default s3 url expiry

* combine regexes into one, and only replace when there are matches

* fix lint

* code not needed anymore, remove

* updates after pulling master

* revert the uploadToS3FromUrl url return

* use model getter to compact code, rename to attachments api

* basic checking of document read permission to allow attachment viewing (sketched after the commit metadata below)

* fix: Continue to upload avatars as public
fix: Allow redirect for non-private attachments

* add support for publicly shared documents

* catch errors that would otherwise crash the app during zip export and user creation

* add tests

* enable AWS signature v4 for s3

* switch to using factories to build models for testing

* add isDocker flag for local serving of attachment redirect url

* fix redirect tests

Co-authored-by: Tom Moor <tom.moor@gmail.com>
Author: Huss
Date: 2020-02-13 03:40:44 +00:00
Committed by: GitHub
Parent: 064d8cea44
Commit: 8e2b19dc7a

15 changed files with 316 additions and 39 deletions
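
Taken together, the bullets describe one flow: attachments are uploaded with a private ACL by default, and viewing them goes through an authenticated endpoint that checks read permission on the parent document before redirecting to a short-lived signed URL. A minimal sketch of such a handler, assuming an Outline-style Koa server; the route name, middleware, and import paths are illustrative, while getSignedImageUrl and the models appear in the diffs below:

// Hypothetical sketch, not the commit's exact handler.
import Router from 'koa-router';
import auth from '../middlewares/authentication'; // assumed path
import { authorize } from '../policies'; // assumed policy helper
import { Attachment, Document } from '../models';
import { getSignedImageUrl } from '../utils/s3';

const router = new Router();

router.post('attachments.redirect', auth(), async ctx => {
  const { id } = ctx.request.body; // assumes a body-parser middleware
  const attachment = await Attachment.findByPk(id);
  const document = await Document.findByPk(attachment.documentId);

  // viewing is allowed only when the user can read the parent document
  authorize(ctx.state.user, 'read', document);

  // private attachments are served via a short-lived signed URL
  ctx.redirect(await getSignedImageUrl(attachment.key));
});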

server/utils/s3.js

@@ -12,6 +12,14 @@ const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
 const AWS_REGION = process.env.AWS_REGION;
 const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME;
 
+const s3 = new AWS.S3({
+  s3ForcePathStyle: true,
+  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
+  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
+  endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL),
+  signatureVersion: 'v4',
+});
+
 const hmac = (key: string, message: string, encoding: any) => {
   return crypto
     .createHmac('sha256', key)
@@ -30,13 +38,17 @@ export const makeCredential = () => {
   return credential;
 };
 
-export const makePolicy = (credential: string, longDate: string) => {
+export const makePolicy = (
+  credential: string,
+  longDate: string,
+  acl: string
+) => {
   const tomorrow = addHours(new Date(), 24);
   const policy = {
     conditions: [
       { bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME },
       ['starts-with', '$key', ''],
-      { acl: 'public-read' },
+      { acl },
       ['content-length-range', 0, +process.env.AWS_S3_UPLOAD_MAX_SIZE],
       ['starts-with', '$Content-Type', 'image'],
       ['starts-with', '$Cache-Control', ''],
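
With acl now a parameter, each call site chooses visibility at upload time; per the commit message, avatars continue to be uploaded as public while document attachments default to private. An assumed pair of call sites (the timestamp value is made up; makeCredential is defined in this file):

// Illustrative only: presigned-POST policies with per-upload ACLs.
const credential = makeCredential();
const longDate = '20200213T000000Z'; // AWS ISO 8601 basic-format timestamp

// document attachments now default to private...
const attachmentPolicy = makePolicy(credential, longDate, 'private');
// ...while avatars keep the old public behaviour
const avatarPolicy = makePolicy(credential, longDate, 'public-read');
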
@@ -77,13 +89,11 @@ export const publicS3Endpoint = (isServerUpload?: boolean) => {
   }`;
 };
 
-export const uploadToS3FromUrl = async (url: string, key: string) => {
-  const s3 = new AWS.S3({
-    s3ForcePathStyle: true,
-    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
-    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-    endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL),
-  });
+export const uploadToS3FromUrl = async (
+  url: string,
+  key: string,
+  acl: string
+) => {
   invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set');
 
   try {
@@ -92,7 +102,7 @@ export const uploadToS3FromUrl = async (url: string, key: string) => {
     const buffer = await res.buffer();
     await s3
       .putObject({
-        ACL: 'public-read',
+        ACL: acl,
         Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
         Key: key,
        ContentType: res.headers['content-type'],
@@ -112,3 +122,36 @@ export const uploadToS3FromUrl = async (url: string, key: string) => {
     }
   }
 };
+
+export const getSignedImageUrl = async (key: string) => {
+  invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set');
+
+  const isDocker = process.env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);
+  const params = {
+    Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
+    Key: key,
+    Expires: 60,
+  };
+
+  return isDocker
+    ? `${publicS3Endpoint()}/${key}`
+    : s3.getSignedUrl('getObject', params);
+};
+
+export const getImageByKey = async (key: string) => {
+  const params = {
+    Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
+    Key: key,
+  };
+
+  try {
+    const data = await s3.getObject(params).promise();
+    return data.Body;
+  } catch (err) {
+    if (process.env.NODE_ENV === 'production') {
+      bugsnag.notify(err);
+    } else {
+      throw err;
+    }
+  }
+};
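
The two new helpers split the read paths: getSignedImageUrl returns a browser-facing URL that expires after 60 seconds (or a plain endpoint URL under the local Docker setup, where the internal s3: hostname would not resolve from a browser), while getImageByKey returns the raw object body for server-side use such as zip export. Assumed usage with a made-up key:

// Illustrative usage of the helpers above; the key is invented.
async function example() {
  const key = 'uploads/user-id/image.png';

  // redirect target for an authorized viewer, valid for 60 seconds
  const signedUrl = await getSignedImageUrl(key);

  // raw bytes, e.g. for embedding the file in an export archive
  const body = await getImageByKey(key);

  return { signedUrl, body };
}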

server/utils/zip.js

@@ -3,13 +3,25 @@ import fs from 'fs';
 import JSZip from 'jszip';
 import tmp from 'tmp';
 import unescape from '../../shared/utils/unescape';
-import { Collection, Document } from '../models';
+import { Attachment, Collection, Document } from '../models';
+import { getImageByKey } from './s3';
+import bugsnag from 'bugsnag';
 
 async function addToArchive(zip, documents) {
   for (const doc of documents) {
     const document = await Document.findByPk(doc.id);
-    zip.file(`${document.title}.md`, unescape(document.text));
+    let text = unescape(document.text);
+
+    const attachments = await Attachment.findAll({
+      where: { documentId: document.id },
+    });
+
+    for (const attachment of attachments) {
+      await addImageToArchive(zip, attachment.key);
+      text = text.replace(attachment.redirectUrl, encodeURI(attachment.key));
+    }
+
+    zip.file(`${document.title}.md`, text);
 
     if (doc.children && doc.children.length) {
       const folder = zip.folder(document.title);
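
The text.replace in the loop above swaps each attachment's redirect URL for the key-based path the file will occupy inside the archive. A worked example with made-up values (the redirect URL shape is an assumption):

// Invented values to show the rewrite; encodeURI escapes spaces but
// leaves the path separators intact.
const text = '![diagram](/api/attachments.redirect?id=abc123)';
const attachment = {
  redirectUrl: '/api/attachments.redirect?id=abc123',
  key: 'uploads/user id/diagram.png',
};

const rewritten = text.replace(attachment.redirectUrl, encodeURI(attachment.key));
// => '![diagram](uploads/user%20id/diagram.png)'
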
@@ -18,6 +30,20 @@ async function addToArchive(zip, documents) {
   }
 }
 
+async function addImageToArchive(zip, key) {
+  try {
+    const img = await getImageByKey(key);
+    zip.file(key, img, { createFolders: true });
+  } catch (err) {
+    if (process.env.NODE_ENV === 'production') {
+      bugsnag.notify(err);
+    } else {
+      // error during file retrieval
+      console.error(err);
+    }
+  }
+}
+
 async function archiveToPath(zip) {
   return new Promise((resolve, reject) => {
     tmp.file({ prefix: 'export-', postfix: '.zip' }, (err, path) => {
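
addImageToArchive stores each downloaded file under its full S3 key and relies on JSZip's createFolders option to materialize the intermediate directories inside the archive. A small standalone illustration (the key and buffer contents are made up):

// createFolders: true registers uploads/ and uploads/user-id/ as folder
// entries, so the archive mirrors the S3 key structure.
import JSZip from 'jszip';

const zip = new JSZip();
const imageBuffer = Buffer.from('placeholder image bytes');
zip.file('uploads/user-id/image.png', imageBuffer, { createFolders: true });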