* Fixes #497 by adding support for an optional new env variable, S3_BUCKET_URL_BASE * Add a section to README about optional S3 bucket URL base configuration.
This commit is contained in:
parent
c820b6258f
commit
76a81bb1a0
8 changed files with 39 additions and 9 deletions
18
README.md
18
README.md
|
@ -99,6 +99,24 @@ If you don't have the full server environment running, you can launch a one-off
|
|||
1. `$ npm install -g pm2`
|
||||
2. `$ pm2 start ecosystem.json`
|
||||
|
||||
## Optional S3 bucket URL base configuration
|
||||
|
||||
If your S3 bucket is in the US East (N. Virginia) region (us-east-1), you'll
|
||||
need to set a custom URL base for it, because it does not follow the same
|
||||
naming pattern as the rest of the regions. Instead, add the following to your
|
||||
environment/.env file:
|
||||
|
||||
```S3_BUCKET_URL_BASE=https://s3.amazonaws.com```
|
||||
|
||||
If you've configured your S3 bucket and DNS records to use a custom domain
|
||||
name, you can also set it using this variable, e.g.:
|
||||
|
||||
```S3_BUCKET_URL_BASE=https://files.mydomain.com```
|
||||
|
||||
For more information on using a custom domain, see this documentation link:
|
||||
|
||||
https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html#VirtualHostingCustomURLs
|
||||
|
||||
## Accessibility Guidelines
|
||||
|
||||
Here is a guide on [how to use the accessible editor](https://gist.github.com/MathuraMG/e86666b7b41fbc8c078bad9aff3f666d)
|
||||
|
|
|
@ -2,7 +2,8 @@ import axios from 'axios';
|
|||
import { createFile } from './files';
|
||||
|
||||
const textFileRegex = /(text\/|application\/json)/;
|
||||
const s3BucketHttps = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
|
||||
const s3BucketHttps = process.env.S3_BUCKET_URL_BASE ||
|
||||
`https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
|
||||
const ROOT_URL = process.env.API_URL;
|
||||
const MAX_LOCAL_FILE_SIZE = 80000; // bytes, aka 80 KB
|
||||
|
||||
|
|
|
@ -4,7 +4,8 @@ import { bindActionCreators } from 'redux';
|
|||
import { connect } from 'react-redux';
|
||||
import * as UploaderActions from '../actions/uploader';
|
||||
|
||||
const s3Bucket = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
|
||||
const s3Bucket = process.env.S3_BUCKET_URL_BASE ||
|
||||
`https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
|
||||
|
||||
class FileUploader extends React.Component {
|
||||
componentDidMount() {
|
||||
|
|
10
index.js
10
index.js
|
@ -4,5 +4,11 @@ if (process.env.NODE_ENV === 'production') {
|
|||
}
|
||||
require('babel-register');
|
||||
require('babel-polyfill');
|
||||
require('dotenv').config();
|
||||
require('./server/server');
|
||||
let parsed = require('dotenv').config();
|
||||
// In development, let .env values override those already in the environment (i.e. in docker-compose.yml)
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
for (let key in parsed) {
|
||||
process.env[key] = parsed[key];
|
||||
}
|
||||
}
|
||||
require('./server/server');
|
||||
|
|
|
@ -17,7 +17,8 @@ const client = s3.createClient({
|
|||
},
|
||||
});
|
||||
|
||||
const s3Bucket = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
|
||||
const s3Bucket = process.env.S3_BUCKET_URL_BASE ||
|
||||
`https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
|
||||
|
||||
function getExtension(filename) {
|
||||
const i = filename.lastIndexOf('.');
|
||||
|
|
|
@ -15,8 +15,8 @@ let client = s3.createClient({
|
|||
maxAsyncS3: 20,
|
||||
s3RetryCount: 3,
|
||||
s3RetryDelay: 1000,
|
||||
multipartUploadThreshold: 20971520, // this is the default (20 MB)
|
||||
multipartUploadSize: 15728640, // this is the default (15 MB)
|
||||
multipartUploadThreshold: 20971520, // this is the default (20 MB)
|
||||
multipartUploadSize: 15728640, // this is the default (15 MB)
|
||||
s3Options: {
|
||||
accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
|
||||
secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
|
||||
|
@ -43,7 +43,8 @@ Project.find({}, (err, projects) => {
|
|||
console.log(err);
|
||||
})
|
||||
.on('end', () => {
|
||||
file.url = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/${userId}/${key}`;
|
||||
file.url = (process.env.S3_BUCKET_URL_BASE ||
|
||||
`https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}`) + `/${userId}/${key}`;
|
||||
project.save((err, savedProject) => {
|
||||
console.log(`updated file ${key}`);
|
||||
});
|
||||
|
@ -54,4 +55,4 @@ Project.find({}, (err, projects) => {
|
|||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -41,6 +41,7 @@ module.exports = {
|
|||
JSON.stringify(false),
|
||||
'NODE_ENV': JSON.stringify('development'),
|
||||
'S3_BUCKET': '"' + process.env.S3_BUCKET + '"',
|
||||
'S3_BUCKET_URL_BASE': '"' + process.env.S3_BUCKET_URL_BASE + '"',
|
||||
'AWS_REGION': '"' + process.env.AWS_REGION + '"',
|
||||
}
|
||||
})
|
||||
|
|
|
@ -82,6 +82,7 @@ module.exports = {
|
|||
'API_URL': '"' + process.env.API_URL + '"',
|
||||
'NODE_ENV': JSON.stringify('production'),
|
||||
'S3_BUCKET': '"' + process.env.S3_BUCKET + '"',
|
||||
'S3_BUCKET_URL_BASE': '"' + process.env.S3_BUCKET_URL_BASE + '"',
|
||||
'AWS_REGION': '"' + process.env.AWS_REGION + '"'
|
||||
}
|
||||
}),
|
||||
|
|
Loading…
Reference in a new issue