chore: Introduce AWS_S3_FORCE_PATH_STYLE option to maintain compatibility with Minio et al (#1443)
- Make AWS_S3_UPLOAD_BUCKET_NAME optional
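Background for the change: the AWS SDK defaults to virtual-hosted addressing, where the bucket name becomes a subdomain of the endpoint, while MinIO and most other S3-compatible servers expect the bucket as the first path segment. A minimal sketch of the two URL shapes, with placeholder hostnames (not values from this commit):

// Sketch (not from the commit): the two S3 addressing styles.
// MinIO typically has no wildcard DNS, so the virtual-hosted
// form below would not resolve against it.
const bucket = "uploads";
const key = "avatars/user-1.png";

const virtualHosted = `https://${bucket}.s3.amazonaws.com/${key}`;
const pathStyle = `https://minio.example.com/${bucket}/${key}`;

console.log(virtualHosted); // https://uploads.s3.amazonaws.com/avatars/user-1.png
console.log(pathStyle); // https://minio.example.com/uploads/avatars/user-1.png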
@@ -45,6 +45,7 @@ AWS_REGION=xx-xxxx-x
 AWS_S3_UPLOAD_BUCKET_URL=http://s3:4569
 AWS_S3_UPLOAD_BUCKET_NAME=bucket_name_here
 AWS_S3_UPLOAD_MAX_SIZE=26214400
+AWS_S3_FORCE_PATH_STYLE=true
 # uploaded s3 objects permission level, default is private
 # set to "public-read" to allow public access
 AWS_S3_ACL=private
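With the new variable, a MinIO-style setup might look as follows; the host, credentials, and bucket name are illustrative placeholders, not values from this commit:

# Illustrative MinIO-style configuration (all values are placeholders)
AWS_ACCESS_KEY_ID=minio_access_key
AWS_SECRET_ACCESS_KEY=minio_secret_key
AWS_REGION=us-east-1
AWS_S3_UPLOAD_BUCKET_URL=http://minio.example.com:9000
AWS_S3_UPLOAD_BUCKET_NAME=outline-uploads
AWS_S3_FORCE_PATH_STYLE=true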
app.json (5 changes)

@@ -92,6 +92,11 @@
       "value": "26214400",
       "required": false
     },
+    "AWS_S3_FORCE_PATH_STYLE": {
+      "description": "Use path-style URL's for connecting to S3 instead of subdomain. This is useful for S3-compatible storage.",
+      "value": "true",
+      "required": false
+    },
     "AWS_REGION": {
       "value": "us-east-1",
       "description": "Region in which the above S3 bucket exists",
index.js (3 changes)

@@ -17,9 +17,8 @@ if (process.env.AWS_ACCESS_KEY_ID) {
     "AWS_REGION",
     "AWS_SECRET_ACCESS_KEY",
     "AWS_S3_UPLOAD_BUCKET_URL",
-    "AWS_S3_UPLOAD_BUCKET_NAME",
     "AWS_S3_UPLOAD_MAX_SIZE",
-  ].forEach(key => {
+  ].forEach((key) => {
     if (!process.env[key]) {
       console.error(`The ${key} env variable must be set when using AWS`);
       // $FlowFixMe
@@ -4,18 +4,18 @@ import * as Sentry from "@sentry/node";
 import AWS from "aws-sdk";
 import addHours from "date-fns/add_hours";
 import format from "date-fns/format";
-import invariant from "invariant";
 import fetch from "isomorphic-fetch";
 
 const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
 const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
 const AWS_REGION = process.env.AWS_REGION;
-const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME;
+const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME || "";
+const AWS_S3_FORCE_PATH_STYLE = process.env.AWS_S3_FORCE_PATH_STYLE !== "false";
 
 const s3 = new AWS.S3({
-  s3ForcePathStyle: true,
-  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
-  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
+  s3ForcePathStyle: AWS_S3_FORCE_PATH_STYLE,
+  accessKeyId: AWS_ACCESS_KEY_ID,
+  secretAccessKey: AWS_SECRET_ACCESS_KEY,
   endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL),
   signatureVersion: "v4",
 });
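Note the parsing convention on the new constant: the value is compared against the literal string "false", so path style stays enabled for any other value, including unset. This keeps the previously hard-coded s3ForcePathStyle: true as the default. A small sketch of the resulting truth table:

// Sketch of the `!== "false"` convention used in the diff above.
for (const value of [undefined, "true", "false", "0", ""]) {
  console.log(JSON.stringify(value), "->", value !== "false");
}
// undefined -> true
// "true" -> true
// "false" -> false
// "0" -> true
// "" -> true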
@@ -84,9 +84,9 @@ export const publicS3Endpoint = (isServerUpload?: boolean) => {
     "localhost:"
   ).replace(/\/$/, "");
 
-  return `${host}/${isServerUpload && isDocker ? "s3/" : ""}${
-    process.env.AWS_S3_UPLOAD_BUCKET_NAME
-  }`;
+  return `${host}/${
+    isServerUpload && isDocker ? "s3/" : ""
+  }${AWS_S3_UPLOAD_BUCKET_NAME}`;
 };
 
 export const uploadToS3FromUrl = async (
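Switching to the module-level constant here also leans on its new || "" fallback: interpolating an unset process.env value into a template literal yields the literal string "undefined". A sketch of the difference, assuming the variable is unset and using a placeholder host:

const host = "https://minio.example.com"; // placeholder, not from the diff
const fromEnv = process.env.AWS_S3_UPLOAD_BUCKET_NAME; // assumed unset here
const withFallback = fromEnv || "";

console.log(`${host}/${fromEnv}`); // https://minio.example.com/undefined
console.log(`${host}/${withFallback}`); // https://minio.example.com/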
@@ -94,8 +94,6 @@ export const uploadToS3FromUrl = async (
   key: string,
   acl: string
 ) => {
-  invariant(AWS_S3_UPLOAD_BUCKET_NAME, "AWS_S3_UPLOAD_BUCKET_NAME not set");
-
   try {
     // $FlowIssue https://github.com/facebook/flow/issues/2171
     const res = await fetch(url);
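Removing the invariant guard is what makes the bucket name genuinely optional: the npm invariant package throws on a falsy first argument, so an empty name previously aborted the call before any request was made. A sketch of the removed behaviour, assuming that package:

import invariant from "invariant";

const bucket = process.env.AWS_S3_UPLOAD_BUCKET_NAME || "";
// With the old guard in place, an empty bucket threw immediately:
invariant(bucket, "AWS_S3_UPLOAD_BUCKET_NAME not set");
// -> Invariant Violation: AWS_S3_UPLOAD_BUCKET_NAME not set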
@@ -103,7 +101,7 @@ export const uploadToS3FromUrl = async (
     await s3
       .putObject({
         ACL: acl,
-        Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
+        Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
         Key: key,
         ContentType: res.headers["content-type"],
         ContentLength: res.headers["content-length"],
@@ -126,18 +124,17 @@ export const uploadToS3FromUrl = async (
 export const deleteFromS3 = (key: string) => {
   return s3
     .deleteObject({
-      Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
+      Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
       Key: key,
     })
     .promise();
 };
 
 export const getSignedImageUrl = async (key: string) => {
-  invariant(AWS_S3_UPLOAD_BUCKET_NAME, "AWS_S3_UPLOAD_BUCKET_NAME not set");
   const isDocker = process.env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);
 
   const params = {
-    Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
+    Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
     Key: key,
     Expires: 60,
   };
@@ -149,7 +146,7 @@ export const getSignedImageUrl = async (key: string) => {
 
 export const getImageByKey = async (key: string) => {
   const params = {
-    Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
+    Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
     Key: key,
   };
 