Fixes #497 by adding support for an optional new env variable, S3_BUCKET_URL_BASE (#499)

* Fixes #497 by adding support for an optional new env variable, S3_BUCKET_URL_BASE

* Add a section to README about optional S3 bucket URL base configuration.
Francis Li 2017-12-13 11:56:24 -08:00 committed by Cassie Tarakajian
parent c820b6258f
commit 76a81bb1a0
8 changed files with 39 additions and 9 deletions

@@ -99,6 +99,24 @@ If you don't have the full server environment running, you can launch a one-off
 1. `$ npm install -g pm2`
 2. `$ pm2 start ecosystem.json`
+
+## Optional S3 bucket URL base configuration
+
+If your S3 bucket is in the US East (N. Virginia) region (us-east-1), you'll
+need to set a custom URL base for it, because it does not follow the same
+naming pattern as the rest of the regions. Add the following to your
+environment/.env file:
+
+```S3_BUCKET_URL_BASE=https://s3.amazonaws.com```
+
+If you've configured your S3 bucket and DNS records to use a custom domain
+name, you can also set it using this variable, e.g.:
+
+```S3_BUCKET_URL_BASE=https://files.mydomain.com```
+
+For more information on using a custom domain, see this documentation link:
+http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html#VirtualHostingCustomURLs
+
 ## Accessibility Guidelines
 Here is a guide on [how to use the accessible editor](https://gist.github.com/MathuraMG/e86666b7b41fbc8c078bad9aff3f666d)
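
For reference, a minimal sketch of how the new fallback resolves (the example values below are illustrative, not defaults from this commit):

```js
// Sketch: the override wins when set; otherwise the regional URL is built.
process.env.S3_BUCKET_URL_BASE = 'https://files.mydomain.com'; // optional override
process.env.AWS_REGION = 'eu-west-1'; // illustrative value
process.env.S3_BUCKET = 'my-sketches'; // illustrative value

const s3BucketHttps = process.env.S3_BUCKET_URL_BASE ||
  `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;

console.log(s3BucketHttps); // -> https://files.mydomain.com
// With S3_BUCKET_URL_BASE unset: https://s3-eu-west-1.amazonaws.com/my-sketches/
```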

@@ -2,7 +2,8 @@ import axios from 'axios';

 import { createFile } from './files';

 const textFileRegex = /(text\/|application\/json)/;
-const s3BucketHttps = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
+const s3BucketHttps = process.env.S3_BUCKET_URL_BASE ||
+  `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
 const ROOT_URL = process.env.API_URL;
 const MAX_LOCAL_FILE_SIZE = 80000; // bytes, aka 80 KB

@@ -4,7 +4,8 @@ import { bindActionCreators } from 'redux';
 import { connect } from 'react-redux';
 import * as UploaderActions from '../actions/uploader';

-const s3Bucket = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
+const s3Bucket = process.env.S3_BUCKET_URL_BASE ||
+  `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;

 class FileUploader extends React.Component {
   componentDidMount() {

@@ -4,5 +4,11 @@ if (process.env.NODE_ENV === 'production') {
 }
 require('babel-register');
 require('babel-polyfill');
-require('dotenv').config();
+let parsed = require('dotenv').config();
+// in development, let .env values override those already in the environment (e.g. in docker-compose.yml)
+if (process.env.NODE_ENV === 'development') {
+  for (let key in parsed) {
+    process.env[key] = parsed[key];
+  }
+}
 require('./server/server');
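
One caveat worth flagging: depending on the dotenv version in use, `config()` returns a `{ parsed, error }` wrapper rather than the key/value map itself, in which case the loop above would iterate the wrapper's keys. A defensive variant (a sketch under that assumption, not the committed code) unwraps first:

```js
// Sketch: assumes dotenv v4's return shape ({ parsed } on success, { error } on failure).
const result = require('dotenv').config();
const parsed = (result && result.parsed) || {};

if (process.env.NODE_ENV === 'development') {
  // Copy .env values over anything already set in the environment.
  Object.keys(parsed).forEach((key) => {
    process.env[key] = parsed[key];
  });
}
```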

@@ -17,7 +17,8 @@ const client = s3.createClient({
   },
 });

-const s3Bucket = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;
+const s3Bucket = process.env.S3_BUCKET_URL_BASE ||
+  `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`;

 function getExtension(filename) {
   const i = filename.lastIndexOf('.');

@@ -15,8 +15,8 @@ let client = s3.createClient({
   maxAsyncS3: 20,
   s3RetryCount: 3,
   s3RetryDelay: 1000,
   multipartUploadThreshold: 20971520, // this is the default (20 MB)
   multipartUploadSize: 15728640, // this is the default (15 MB)
   s3Options: {
     accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
     secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
@@ -43,7 +43,8 @@ Project.find({}, (err, projects) => {
         console.log(err);
       })
       .on('end', () => {
-        file.url = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/${userId}/${key}`;
+        file.url = (process.env.S3_BUCKET_URL_BASE ||
+          `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}`) + `/${userId}/${key}`;
         project.save((err, savedProject) => {
           console.log(`updated file ${key}`);
         });
@@ -54,4 +55,4 @@
       }
     });
   });
 });
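
A small consistency note: the constants in the earlier files include a trailing slash in the regional fallback, while this migration concatenates `/${userId}/${key}` onto a base without one, so a custom S3_BUCKET_URL_BASE ending in a slash would produce a double slash here. A hypothetical helper (a sketch, not part of this commit) that tolerates both shapes:

```js
// Sketch: normalize the base before composing object URLs, so
// 'https://files.mydomain.com' and 'https://files.mydomain.com/' behave alike.
function s3ObjectUrl(base, userId, key) {
  return `${base.replace(/\/+$/, '')}/${userId}/${key}`;
}

// Both yield https://files.mydomain.com/abc123/sketch.js (illustrative values)
s3ObjectUrl('https://files.mydomain.com', 'abc123', 'sketch.js');
s3ObjectUrl('https://files.mydomain.com/', 'abc123', 'sketch.js');
```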

@@ -41,6 +41,7 @@ module.exports = {
         JSON.stringify(false),
       'NODE_ENV': JSON.stringify('development'),
       'S3_BUCKET': '"' + process.env.S3_BUCKET + '"',
+      'S3_BUCKET_URL_BASE': '"' + process.env.S3_BUCKET_URL_BASE + '"',
       'AWS_REGION': '"' + process.env.AWS_REGION + '"',
     }
   })

@@ -82,6 +82,7 @@ module.exports = {
       'API_URL': '"' + process.env.API_URL + '"',
       'NODE_ENV': JSON.stringify('production'),
       'S3_BUCKET': '"' + process.env.S3_BUCKET + '"',
+      'S3_BUCKET_URL_BASE': '"' + process.env.S3_BUCKET_URL_BASE + '"',
       'AWS_REGION': '"' + process.env.AWS_REGION + '"'
     }
   }),
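
One hedged note on the webpack wiring in these two configs: with the `'"' + value + '"'` pattern, an unset variable is inlined into the client bundle as the literal string `"undefined"`, which is truthy and would defeat the `||` fallback in the client code. A sketch of an alternative (not what this commit does) that keeps the value falsy when unset:

```js
// Sketch: JSON.stringify on a defaulted value keeps an unset variable as an
// empty (falsy) string instead of inlining the truthy string "undefined".
const webpack = require('webpack');

const definePlugin = new webpack.DefinePlugin({
  'process.env': {
    'S3_BUCKET_URL_BASE': JSON.stringify(process.env.S3_BUCKET_URL_BASE || ''),
  },
});
```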