diff --git a/client/modules/IDE/actions/project.js b/client/modules/IDE/actions/project.js
index 8054cb14..4cbd0102 100644
--- a/client/modules/IDE/actions/project.js
+++ b/client/modules/IDE/actions/project.js
@@ -7,6 +7,7 @@ import { setUnsavedChanges,
   justOpenedProject,
   resetJustOpenedProject,
   showErrorModal } from './ide';
+import each from 'async/each';
 
const ROOT_URL = process.env.API_URL;
 
@@ -190,23 +191,47 @@ export function cloneProject() {
     rootFile.id = newRootFileId;
     rootFile._id = newRootFileId;
     generateNewIdsForChildren(rootFile, newFiles);
-    // const newFiles = state.files;
-    const formParams = Object.assign({}, { name: `${state.project.name} copy` }, { files: newFiles });
-    axios.post(`${ROOT_URL}/projects`, formParams, { withCredentials: true })
-      .then((response) => {
-        browserHistory.push(`/${response.data.user.username}/sketches/${response.data.id}`);
-        console.log(response.data);
-        dispatch({
-          type: ActionTypes.NEW_PROJECT,
-          project: response.data,
-          owner: response.data.user,
-          files: response.data.files
-        });
-      })
-      .catch(response => dispatch({
-        type: ActionTypes.PROJECT_SAVE_FAIL,
-        error: response.data
-      }));
+    // first duplicate every file hosted on S3 so the clone gets its own copies
+    each(newFiles, (file, callback) => {
+      if (file.url) {
+        const formParams = {
+          url: file.url
+        };
+        axios.post(`${ROOT_URL}/S3/copy`, formParams, { withCredentials: true })
+          .then((response) => {
+            file.url = response.data.url;
+            callback(null);
+          })
+          .catch(callback); // surface S3 copy failures to the final callback
+      } else {
+        callback(null);
+      }
+    }, (err) => {
+      // only duplicate the project itself if no S3 copy failed
+      if (err) {
+        dispatch({
+          type: ActionTypes.PROJECT_SAVE_FAIL,
+          error: err
+        });
+        return;
+      }
+      const formParams = Object.assign({}, { name: `${state.project.name} copy` }, { files: newFiles });
+      axios.post(`${ROOT_URL}/projects`, formParams, { withCredentials: true })
+        .then((response) => {
+          browserHistory.push(`/${response.data.user.username}/sketches/${response.data.id}`);
+          console.log(response.data);
+          dispatch({
+            type: ActionTypes.NEW_PROJECT,
+            project: response.data,
+            owner: response.data.user,
+            files: response.data.files
+          });
+        })
+        .catch(response => dispatch({
+          type: ActionTypes.PROJECT_SAVE_FAIL,
+          error: response.data
+        }));
+    });
   };
 }
diff --git a/package.json b/package.json
index 2f346928..7e0e55f2 100644
--- a/package.json
+++ b/package.json
@@ -108,6 +108,7 @@
     "redux-thunk": "^2.1.0",
     "request": "^2.76.0",
     "request-promise": "^4.1.1",
+    "s3": "^4.4.0",
     "s3-policy": "^0.2.0",
     "shortid": "^2.2.6",
     "srcdoc-polyfill": "^0.2.0",
diff --git a/server/controllers/aws.controller.js b/server/controllers/aws.controller.js
index 7a1b098c..138ca51d 100644
--- a/server/controllers/aws.controller.js
+++ b/server/controllers/aws.controller.js
@@ -1,5 +1,20 @@
 import uuid from 'node-uuid';
 import policy from 's3-policy';
+import s3 from 's3';
+
+const client = s3.createClient({
+  maxAsyncS3: 20,
+  s3RetryCount: 3,
+  s3RetryDelay: 1000,
+  multipartUploadThreshold: 20971520, // this is the default (20 MB)
+  multipartUploadSize: 15728640, // this is the default (15 MB)
+  s3Options: {
+    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
+    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
+  },
+});
+
+const s3Bucket = `https://s3-us-west-2.amazonaws.com/${process.env.S3_BUCKET}/`;
 
 function getExtension(filename) {
   const i = filename.lastIndexOf('.');
@@ -27,4 +42,22 @@
   return res.json(result);
 }
 
-export default signS3;
+export function copyObjectInS3(req, res) {
+  const url = req.body.url;
+  const objectKey = url.split('/').pop();
+
+  const fileExtension = getExtension(objectKey);
+  const newFilename = uuid.v4() + fileExtension;
+  const params = {
+    Bucket: `${process.env.S3_BUCKET}`,
+    CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
+    Key: newFilename
+  };
+  const copy = client.copyObject(params);
+  copy.on('error', (err) => {
+    res.status(500).json({ error: err.message }); // don't leave the request hanging on failure
+  });
+  copy.on('end', () => {
+    res.json({ url: `${s3Bucket}${newFilename}` }); // s3Bucket already ends with '/'
+  });
+}
diff --git a/server/routes/aws.routes.js b/server/routes/aws.routes.js
index 8f80ca4f..8d8e1462 100644
--- a/server/routes/aws.routes.js
+++ b/server/routes/aws.routes.js
@@ -4,5 +4,6 @@ import * as AWSController from '../controllers/aws.controller';
 const router = new Router();
 
 router.route('/S3/sign').post(AWSController.signS3);
+router.route('/S3/copy').post(AWSController.copyObjectInS3);
 
 export default router;
diff --git a/server/utils/s3.js b/server/utils/s3.js
new file mode 100644
index 00000000..16fda808
--- /dev/null
+++ b/server/utils/s3.js
@@ -0,0 +1,33 @@
+import uuid from 'node-uuid';
+import s3 from 's3';
+
+const client = s3.createClient({
+  maxAsyncS3: 20,
+  s3RetryCount: 3,
+  s3RetryDelay: 1000,
+  multipartUploadThreshold: 20971520, // this is the default (20 MB)
+  multipartUploadSize: 15728640, // this is the default (15 MB)
+  s3Options: {
+    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
+    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
+  },
+});
+
+const s3Bucket = `https://s3-us-west-2.amazonaws.com/${process.env.S3_BUCKET}/`;
+
+// Sketch of the shared helper this file is meant to hold: it mirrors
+// copyObjectInS3 in aws.controller.js. The (err, newUrl) callback contract
+// is an assumption, since nothing calls this helper in this patch yet.
+export function copyObjectOnS3(url, callback) {
+  const objectKey = url.split('/').pop();
+  const i = objectKey.lastIndexOf('.');
+  const fileExtension = i === -1 ? '' : objectKey.slice(i);
+  const newFilename = uuid.v4() + fileExtension;
+  const copy = client.copyObject({
+    Bucket: `${process.env.S3_BUCKET}`,
+    CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
+    Key: newFilename
+  });
+  copy.on('error', err => callback(err));
+  copy.on('end', () => callback(null, `${s3Bucket}${newFilename}`));
+}
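
For reference, a minimal JavaScript sketch (not part of the patch) of the round trip this diff enables: a client calls the new endpoint once per S3-hosted file, exactly as cloneProject now does. The endpoint path, the { url } request body, and the { url } response shape come from copyObjectInS3 above; the helper name and example bucket URL below are made up.

    // Hypothetical client helper exercising POST /S3/copy.
    // Assumes an authenticated session (withCredentials) and a file that was
    // previously uploaded through POST /S3/sign.
    import axios from 'axios';

    const ROOT_URL = process.env.API_URL;

    function copyAsset(url) {
      return axios
        .post(`${ROOT_URL}/S3/copy`, { url }, { withCredentials: true })
        .then(response => response.data.url); // new location of the duplicate
    }

    // e.g. copyAsset('https://s3-us-west-2.amazonaws.com/example-bucket/a1b2.png')
    //        .then(newUrl => console.log('duplicated at', newUrl));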