diff --git a/.env.sample b/.env.sample index e1dd0f54c..cff49326b 100644 --- a/.env.sample +++ b/.env.sample @@ -49,6 +49,9 @@ AWS_REGION=xx-xxxx-x AWS_S3_UPLOAD_BUCKET_URL=http://s3:4569 AWS_S3_UPLOAD_BUCKET_NAME=bucket_name_here AWS_S3_UPLOAD_MAX_SIZE=26214400 +# uploaded s3 objects permission level, default is private +# set to "public-read" to allow public access +AWS_S3_ACL=private # Emails configuration (optional) SMTP_HOST= diff --git a/app/scenes/Settings/components/ImageUpload.js b/app/scenes/Settings/components/ImageUpload.js index a8695c589..7df8d3b97 100644 --- a/app/scenes/Settings/components/ImageUpload.js +++ b/app/scenes/Settings/components/ImageUpload.js @@ -49,10 +49,11 @@ class DropToImport extends React.Component { const canvas = this.avatarEditorRef.getImage(); const imageBlob = dataUrlToBlob(canvas.toDataURL()); try { - const asset = await uploadFile(imageBlob, { + const attachment = await uploadFile(imageBlob, { name: this.file.name, + public: true, }); - this.props.onSuccess(asset.url); + this.props.onSuccess(attachment.url); } catch (err) { this.props.onError(err.message); } finally { diff --git a/app/utils/uploadFile.js b/app/utils/uploadFile.js index 8e158de5f..6bed5b34a 100644 --- a/app/utils/uploadFile.js +++ b/app/utils/uploadFile.js @@ -5,6 +5,7 @@ import invariant from 'invariant'; type Options = { name?: string, documentId?: string, + public?: boolean, }; export const uploadFile = async ( @@ -13,6 +14,7 @@ export const uploadFile = async ( ) => { const name = file instanceof File ? 
file.name : options.name; const response = await client.post('/users.s3Upload', { + public: options.public, documentId: options.documentId, contentType: file.type, size: file.size, diff --git a/server/api/attachments.js b/server/api/attachments.js new file mode 100644 index 000000000..6fa3b3ef1 --- /dev/null +++ b/server/api/attachments.js @@ -0,0 +1,32 @@ +// @flow +import Router from 'koa-router'; +import auth from '../middlewares/authentication'; +import { Attachment, Document } from '../models'; +import { getSignedImageUrl } from '../utils/s3'; + +import policy from '../policies'; + +const { authorize } = policy; +const router = new Router(); + +router.post('attachments.redirect', auth(), async ctx => { + const { id } = ctx.body; + ctx.assertPresent(id, 'id is required'); + + const user = ctx.state.user; + const attachment = await Attachment.findByPk(id); + + if (attachment.isPrivate) { + const document = await Document.findByPk(attachment.documentId, { + userId: user.id, + }); + authorize(user, 'read', document); + + const accessUrl = await getSignedImageUrl(attachment.key); + ctx.redirect(accessUrl); + } else { + ctx.redirect(attachment.url); + } +}); + +export default router; diff --git a/server/api/attachments.test.js b/server/api/attachments.test.js new file mode 100644 index 000000000..7ed1919cc --- /dev/null +++ b/server/api/attachments.test.js @@ -0,0 +1,86 @@ +/* eslint-disable flowtype/require-valid-file-annotation */ +import TestServer from 'fetch-test-server'; +import app from '../app'; +import { flushdb } from '../test/support'; +import { + buildUser, + buildCollection, + buildAttachment, + buildDocument, +} from '../test/factories'; + +const server = new TestServer(app.callback()); + +beforeEach(flushdb); +afterAll(server.close); + +describe('#attachments.redirect', async () => { + it('should require authentication', async () => { + const res = await server.post('/api/attachments.redirect'); + expect(res.status).toEqual(401); + }); + + it('should 
return a redirect for an attachment belonging to a document user has access to', async () => { + const user = await buildUser(); + const attachment = await buildAttachment({ + teamId: user.teamId, + userId: user.id, + }); + const res = await server.post('/api/attachments.redirect', { + body: { token: user.getJwtToken(), id: attachment.id }, + redirect: 'manual', + }); + + expect(res.status).toEqual(302); + }); + + it('should always return a redirect for a public attachment', async () => { + const user = await buildUser(); + const collection = await buildCollection({ + teamId: user.teamId, + userId: user.id, + private: true, + }); + const document = await buildDocument({ + teamId: user.teamId, + userId: user.id, + collectionId: collection.id, + }); + const attachment = await buildAttachment({ + teamId: user.teamId, + userId: user.id, + documentId: document.id, + }); + + const res = await server.post('/api/attachments.redirect', { + body: { token: user.getJwtToken(), id: attachment.id }, + redirect: 'manual', + }); + + expect(res.status).toEqual(302); + }); + + it('should not return a redirect for a private attachment belonging to a document user does not have access to', async () => { + const user = await buildUser(); + const collection = await buildCollection({ + private: true, + }); + const document = await buildDocument({ + teamId: collection.teamId, + userId: collection.userId, + collectionId: collection.id, + }); + const attachment = await buildAttachment({ + teamId: document.teamId, + userId: document.userId, + documentId: document.id, + acl: 'private', + }); + + const res = await server.post('/api/attachments.redirect', { + body: { token: user.getJwtToken(), id: attachment.id }, + }); + + expect(res.status).toEqual(403); + }); +}); diff --git a/server/api/index.js b/server/api/index.js index bb3e1fd52..a5210d76a 100644 --- a/server/api/index.js +++ b/server/api/index.js @@ -16,6 +16,7 @@ import team from './team'; import integrations from './integrations'; 
import notificationSettings from './notificationSettings'; import utils from './utils'; +import attachments from './attachments'; import { NotFoundError } from '../errors'; import errorHandling from '../middlewares/errorHandling'; @@ -48,6 +49,7 @@ router.use('/', shares.routes()); router.use('/', team.routes()); router.use('/', integrations.routes()); router.use('/', notificationSettings.routes()); +router.use('/', attachments.routes()); router.use('/', utils.routes()); router.post('*', ctx => { ctx.throw(new NotFoundError('Endpoint not found')); diff --git a/server/api/team.js b/server/api/team.js index efa01171a..471fec290 100644 --- a/server/api/team.js +++ b/server/api/team.js @@ -1,7 +1,6 @@ // @flow import Router from 'koa-router'; import { Team } from '../models'; -import { publicS3Endpoint } from '../utils/s3'; import auth from '../middlewares/authentication'; import { presentTeam, presentPolicies } from '../presenters'; @@ -19,8 +18,6 @@ router.post('team.update', auth(), async ctx => { guestSignin, documentEmbeds, } = ctx.body; - const endpoint = publicS3Endpoint(); - const user = ctx.state.user; const team = await Team.findByPk(user.teamId); authorize(user, 'update', team); @@ -33,9 +30,7 @@ router.post('team.update', auth(), async ctx => { if (sharing !== undefined) team.sharing = sharing; if (documentEmbeds !== undefined) team.documentEmbeds = documentEmbeds; if (guestSignin !== undefined) team.guestSignin = guestSignin; - if (avatarUrl && avatarUrl.startsWith(`${endpoint}/uploads/${user.id}`)) { - team.avatarUrl = avatarUrl; - } + if (avatarUrl !== undefined) team.avatarUrl = avatarUrl; await team.save(); ctx.body = { diff --git a/server/api/users.js b/server/api/users.js index 8c9f8ba6f..f7e770fc4 100644 --- a/server/api/users.js +++ b/server/api/users.js @@ -17,6 +17,7 @@ import userInviter from '../commands/userInviter'; import { presentUser } from '../presenters'; import policy from '../policies'; +const AWS_S3_ACL = process.env.AWS_S3_ACL || 
'private'; const { authorize } = policy; const router = new Router(); @@ -61,12 +62,9 @@ router.post('users.info', auth(), async ctx => { router.post('users.update', auth(), async ctx => { const { user } = ctx.state; const { name, avatarUrl } = ctx.body; - const endpoint = publicS3Endpoint(); if (name) user.name = name; - if (avatarUrl && avatarUrl.startsWith(`${endpoint}/uploads/${user.id}`)) { - user.avatarUrl = avatarUrl; - } + if (avatarUrl) user.avatarUrl = avatarUrl; await user.save(); @@ -89,14 +87,19 @@ router.post('users.s3Upload', auth(), async ctx => { const { user } = ctx.state; const s3Key = uuid.v4(); const key = `uploads/${user.id}/${s3Key}/${name}`; + const acl = + ctx.body.public === undefined + ? AWS_S3_ACL + : ctx.body.public ? 'public-read' : 'private'; const credential = makeCredential(); const longDate = format(new Date(), 'YYYYMMDDTHHmmss\\Z'); - const policy = makePolicy(credential, longDate); + const policy = makePolicy(credential, longDate, acl); const endpoint = publicS3Endpoint(); const url = `${endpoint}/${key}`; - await Attachment.create({ + const attachment = await Attachment.create({ key, + acl, size, url, contentType, @@ -120,7 +123,7 @@ router.post('users.s3Upload', auth(), async ctx => { form: { 'Cache-Control': 'max-age=31557600', 'Content-Type': contentType, - acl: 'public-read', + acl, key, policy, 'x-amz-algorithm': 'AWS4-HMAC-SHA256', @@ -131,7 +134,7 @@ router.post('users.s3Upload', auth(), async ctx => { asset: { contentType, name, - url, + url: attachment.redirectUrl, size, }, }, diff --git a/server/models/Attachment.js b/server/models/Attachment.js index e3a13a8b3..eb99cddd2 100644 --- a/server/models/Attachment.js +++ b/server/models/Attachment.js @@ -40,6 +40,12 @@ const Attachment = sequelize.define( name: function() { return path.parse(this.key).base; }, + redirectUrl: function() { + return `/api/attachments.redirect?id=${this.id}`; + }, + isPrivate: function() { + return this.acl === 'private'; + }, }, } ); diff 
--git a/server/models/Team.js b/server/models/Team.js index d11615ab7..0dc8699ff 100644 --- a/server/models/Team.js +++ b/server/models/Team.js @@ -91,12 +91,18 @@ Team.associate = models => { const uploadAvatar = async model => { const endpoint = publicS3Endpoint(); + const { avatarUrl } = model; - if (model.avatarUrl && !model.avatarUrl.startsWith(endpoint)) { + if ( + avatarUrl && + !avatarUrl.startsWith('/api') && + !avatarUrl.startsWith(endpoint) + ) { try { const newUrl = await uploadToS3FromUrl( - model.avatarUrl, - `avatars/${model.id}/${uuid.v4()}` + avatarUrl, + `avatars/${model.id}/${uuid.v4()}`, + 'public-read' ); if (newUrl) model.avatarUrl = newUrl; } catch (err) { diff --git a/server/models/User.js b/server/models/User.js index 0717d92f8..5069c0264 100644 --- a/server/models/User.js +++ b/server/models/User.js @@ -128,14 +128,21 @@ const uploadAvatar = async model => { if ( avatarUrl && + !avatarUrl.startsWith('/api') && !avatarUrl.startsWith(endpoint) && !avatarUrl.startsWith(DEFAULT_AVATAR_HOST) ) { - const newUrl = await uploadToS3FromUrl( - avatarUrl, - `avatars/${model.id}/${uuid.v4()}` - ); - if (newUrl) model.avatarUrl = newUrl; + try { + const newUrl = await uploadToS3FromUrl( + avatarUrl, + `avatars/${model.id}/${uuid.v4()}`, + 'public-read' + ); + if (newUrl) model.avatarUrl = newUrl; + } catch (err) { + // we can try again next time + console.error(err); + } } }; diff --git a/server/presenters/document.js b/server/presenters/document.js index 83f10af6b..1f95420b7 100644 --- a/server/presenters/document.js +++ b/server/presenters/document.js @@ -1,24 +1,46 @@ // @flow import { takeRight } from 'lodash'; -import { User, Document } from '../models'; +import { User, Document, Attachment } from '../models'; +import { getSignedImageUrl } from '../utils/s3'; import presentUser from './user'; type Options = { isPublic?: boolean, }; +const attachmentRegex = /!\[.*\]\(\/api\/attachments\.redirect\?id=(?<id>.*)\)/gi; + +// replaces attachments.redirect
urls with signed/authenticated url equivalents +async function replaceImageAttachments(text) { + const attachmentIds = [...text.matchAll(attachmentRegex)].map( + match => match.groups && match.groups.id + ); + + for (const id of attachmentIds) { + const attachment = await Attachment.findByPk(id); + const accessUrl = await getSignedImageUrl(attachment.key); + text = text.replace(attachment.redirectUrl, accessUrl); + } + + return text; +} + export default async function present(document: Document, options: ?Options) { options = { isPublic: false, ...options, }; + const text = options.isPublic + ? await replaceImageAttachments(document.text) + : document.text; + const data = { id: document.id, url: document.url, urlId: document.urlId, title: document.title, - text: document.text, + text, emoji: document.emoji, createdAt: document.createdAt, createdBy: undefined, diff --git a/server/test/factories.js b/server/test/factories.js index 5850b53b4..1f8acb6cc 100644 --- a/server/test/factories.js +++ b/server/test/factories.js @@ -1,5 +1,13 @@ // @flow -import { Share, Team, User, Event, Document, Collection } from '../models'; +import { + Share, + Team, + User, + Event, + Document, + Collection, + Attachment, +} from '../models'; import uuid from 'uuid'; let count = 0; @@ -104,3 +112,38 @@ export async function buildDocument(overrides: Object = {}) { ...overrides, }); } + +export async function buildAttachment(overrides: Object = {}) { + count++; + + if (!overrides.teamId) { + const team = await buildTeam(); + overrides.teamId = team.id; + } + + if (!overrides.userId) { + const user = await buildUser(); + overrides.userId = user.id; + } + + if (!overrides.collectionId) { + const collection = await buildCollection(overrides); + overrides.collectionId = collection.id; + } + + if (!overrides.documentId) { + const document = await buildDocument(overrides); + overrides.documentId = document.id; + } + + return Attachment.create({ + key: `uploads/key/to/file ${count}.png`, + url: 
`https://redirect.url.com/uploads/key/to/file ${count}.png`, + contentType: 'image/png', + size: 100, + acl: 'public-read', + createdAt: new Date('2018-01-02T00:00:00.000Z'), + updatedAt: new Date('2018-01-02T00:00:00.000Z'), + ...overrides, + }); +} diff --git a/server/utils/s3.js b/server/utils/s3.js index 778942727..e0f0846ce 100644 --- a/server/utils/s3.js +++ b/server/utils/s3.js @@ -12,6 +12,14 @@ const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID; const AWS_REGION = process.env.AWS_REGION; const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME; +const s3 = new AWS.S3({ + s3ForcePathStyle: true, + accessKeyId: process.env.AWS_ACCESS_KEY_ID, + secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL), + signatureVersion: 'v4', +}); + const hmac = (key: string, message: string, encoding: any) => { return crypto .createHmac('sha256', key) @@ -30,13 +38,17 @@ export const makeCredential = () => { return credential; }; -export const makePolicy = (credential: string, longDate: string) => { +export const makePolicy = ( + credential: string, + longDate: string, + acl: string +) => { const tomorrow = addHours(new Date(), 24); const policy = { conditions: [ { bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME }, ['starts-with', '$key', ''], - { acl: 'public-read' }, + { acl }, ['content-length-range', 0, +process.env.AWS_S3_UPLOAD_MAX_SIZE], ['starts-with', '$Content-Type', 'image'], ['starts-with', '$Cache-Control', ''], @@ -77,13 +89,11 @@ export const publicS3Endpoint = (isServerUpload?: boolean) => { }`; }; -export const uploadToS3FromUrl = async (url: string, key: string) => { - const s3 = new AWS.S3({ - s3ForcePathStyle: true, - accessKeyId: process.env.AWS_ACCESS_KEY_ID, - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL), - }); +export const uploadToS3FromUrl = async ( + url: string, + key: string, + acl: string +) 
=> { invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set'); try { @@ -92,7 +102,7 @@ export const uploadToS3FromUrl = async (url: string, key: string) => { const buffer = await res.buffer(); await s3 .putObject({ - ACL: 'public-read', + ACL: acl, Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, Key: key, ContentType: res.headers['content-type'], @@ -112,3 +122,36 @@ export const uploadToS3FromUrl = async (url: string, key: string) => { } } }; + +export const getSignedImageUrl = async (key: string) => { + invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set'); + const isDocker = process.env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/); + + const params = { + Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + Expires: 60, + }; + + return isDocker + ? `${publicS3Endpoint()}/${key}` + : s3.getSignedUrl('getObject', params); +}; + +export const getImageByKey = async (key: string) => { + const params = { + Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, + Key: key, + }; + + try { + const data = await s3.getObject(params).promise(); + return data.Body; + } catch (err) { + if (process.env.NODE_ENV === 'production') { + bugsnag.notify(err); + } else { + throw err; + } + } +}; diff --git a/server/utils/zip.js b/server/utils/zip.js index 3b11d2aec..018e22b35 100644 --- a/server/utils/zip.js +++ b/server/utils/zip.js @@ -3,13 +3,25 @@ import fs from 'fs'; import JSZip from 'jszip'; import tmp from 'tmp'; import unescape from '../../shared/utils/unescape'; -import { Collection, Document } from '../models'; +import { Attachment, Collection, Document } from '../models'; +import { getImageByKey } from './s3'; +import bugsnag from 'bugsnag'; async function addToArchive(zip, documents) { for (const doc of documents) { const document = await Document.findByPk(doc.id); + let text = unescape(document.text); - zip.file(`${document.title}.md`, unescape(document.text)); + const attachments = await Attachment.findAll({ + where: { documentId: 
document.id }, + }); + + for (const attachment of attachments) { + await addImageToArchive(zip, attachment.key); + text = text.replace(attachment.redirectUrl, encodeURI(attachment.key)); + } + + zip.file(`${document.title}.md`, text); if (doc.children && doc.children.length) { const folder = zip.folder(document.title); @@ -18,6 +30,20 @@ async function addToArchive(zip, documents) { } } +async function addImageToArchive(zip, key) { + try { + const img = await getImageByKey(key); + zip.file(key, img, { createFolders: true }); + } catch (err) { + if (process.env.NODE_ENV === 'production') { + bugsnag.notify(err); + } else { + // error during file retrieval + console.error(err); + } + } +} + async function archiveToPath(zip) { return new Promise((resolve, reject) => { tmp.file({ prefix: 'export-', postfix: '.zip' }, (err, path) => {