diff --git a/README.md b/README.md index 70cc1310..01f37499 100644 --- a/README.md +++ b/README.md @@ -99,6 +99,24 @@ If you don't have the full server environment running, you can launch a one-off 1. `$ npm install -g pm2` 2. `$ pm2 start ecosystem.json` +## Optional S3 bucket URL base configuration + +If your S3 bucket is in the US East (N. Virginia) region (us-east-1), you'll +need to set a custom URL base for it, because it does not follow the same standard +naming pattern as the rest of the regions. Instead, add the following to your +environment/.env file: + +```S3_BUCKET_URL_BASE=https://s3.amazonaws.com``` + +If you've configured your S3 bucket and DNS records to use a custom domain +name, you can also set it using this variable. E.g.: + +```S3_BUCKET_URL_BASE=https://files.mydomain.com``` + +For more information on using a custom domain, see this documentation link: + +https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html#VirtualHostingCustomURLs + ## Accessibility Guidelines Here is guide on [how to use the accessible editor](https://gist.github.com/MathuraMG/e86666b7b41fbc8c078bad9aff3f666d) diff --git a/client/modules/IDE/actions/uploader.js b/client/modules/IDE/actions/uploader.js index 223a38bd..1ec2208b 100644 --- a/client/modules/IDE/actions/uploader.js +++ b/client/modules/IDE/actions/uploader.js @@ -2,7 +2,8 @@ import axios from 'axios'; import { createFile } from './files'; const textFileRegex = /(text\/|application\/json)/; -const s3BucketHttps = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`; +const s3BucketHttps = process.env.S3_BUCKET_URL_BASE || + `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`; const ROOT_URL = process.env.API_URL; const MAX_LOCAL_FILE_SIZE = 80000; // bytes, aka 80 KB diff --git a/client/modules/IDE/components/FileUploader.jsx b/client/modules/IDE/components/FileUploader.jsx index 0b523efd..9c109764 100644 --- 
a/client/modules/IDE/components/FileUploader.jsx +++ b/client/modules/IDE/components/FileUploader.jsx @@ -4,7 +4,8 @@ import { bindActionCreators } from 'redux'; import { connect } from 'react-redux'; import * as UploaderActions from '../actions/uploader'; -const s3Bucket = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`; +const s3Bucket = process.env.S3_BUCKET_URL_BASE || + `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`; class FileUploader extends React.Component { componentDidMount() { diff --git a/index.js b/index.js index 6d5ed22e..ae64c308 100644 --- a/index.js +++ b/index.js @@ -4,5 +4,11 @@ if (process.env.NODE_ENV === 'production') { } require('babel-register'); require('babel-polyfill'); -require('dotenv').config(); -require('./server/server'); \ No newline at end of file +const parsed = require('dotenv').config().parsed || {}; +// in development, let .env values override those in the environment already (i.e. in docker-compose.yml) +if (process.env.NODE_ENV === 'development') { + for (const key of Object.keys(parsed)) { + process.env[key] = parsed[key]; + } +} +require('./server/server'); diff --git a/server/controllers/aws.controller.js b/server/controllers/aws.controller.js index 2faf8938..a5e507e8 100644 --- a/server/controllers/aws.controller.js +++ b/server/controllers/aws.controller.js @@ -17,7 +17,8 @@ const client = s3.createClient({ }, }); -const s3Bucket = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`; +const s3Bucket = process.env.S3_BUCKET_URL_BASE || + `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/`; function getExtension(filename) { const i = filename.lastIndexOf('.'); diff --git a/server/migrations/s3UnderUser.js b/server/migrations/s3UnderUser.js index 289d212e..d22317c5 100644 --- a/server/migrations/s3UnderUser.js +++ b/server/migrations/s3UnderUser.js @@ -15,8 +15,8 @@ let client = s3.createClient({ maxAsyncS3: 20, s3RetryCount: 3, 
s3RetryDelay: 1000, - multipartUploadThreshold: 20971520, // this is the default (20 MB) - multipartUploadSize: 15728640, // this is the default (15 MB) + multipartUploadThreshold: 20971520, // this is the default (20 MB) + multipartUploadSize: 15728640, // this is the default (15 MB) s3Options: { accessKeyId: `${process.env.AWS_ACCESS_KEY}`, secretAccessKey: `${process.env.AWS_SECRET_KEY}`, @@ -43,7 +43,8 @@ Project.find({}, (err, projects) => { console.log(err); }) .on('end', () => { - file.url = `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}/${userId}/${key}`; + file.url = (process.env.S3_BUCKET_URL_BASE || + `https://s3-${process.env.AWS_REGION}.amazonaws.com/${process.env.S3_BUCKET}`) + `/${userId}/${key}`; project.save((err, savedProject) => { console.log(`updated file ${key}`); }); @@ -54,4 +55,4 @@ Project.find({}, (err, projects) => { } }); }); -}); \ No newline at end of file +}); diff --git a/webpack.config.dev.js b/webpack.config.dev.js index 42d6cadf..67127f20 100644 --- a/webpack.config.dev.js +++ b/webpack.config.dev.js @@ -41,6 +41,7 @@ module.exports = { JSON.stringify(false), 'NODE_ENV': JSON.stringify('development'), 'S3_BUCKET': '"' + process.env.S3_BUCKET + '"', + 'S3_BUCKET_URL_BASE': '"' + process.env.S3_BUCKET_URL_BASE + '"', 'AWS_REGION': '"' + process.env.AWS_REGION + '"', } }) diff --git a/webpack.config.prod.js b/webpack.config.prod.js index a860d5c2..e5f9d037 100644 --- a/webpack.config.prod.js +++ b/webpack.config.prod.js @@ -82,6 +82,7 @@ module.exports = { 'API_URL': '"' + process.env.API_URL + '"', 'NODE_ENV': JSON.stringify('production'), 'S3_BUCKET': '"' + process.env.S3_BUCKET + '"', + 'S3_BUCKET_URL_BASE': '"' + process.env.S3_BUCKET_URL_BASE + '"', 'AWS_REGION': '"' + process.env.AWS_REGION + '"' } }),