From fa04054d283431583e258ffa2be1b47b2c1ad250 Mon Sep 17 00:00:00 2001
From: Cassie Tarakajian
Date: Thu, 23 Mar 2017 14:50:47 -0400
Subject: [PATCH] duplicating files works

---
 client/modules/IDE/actions/project.js |  8 +++++---
 server/controllers/aws.controller.js  |  6 +++---
 server/utils/s3.js                    | 18 ------------------
 3 files changed, 8 insertions(+), 24 deletions(-)
 delete mode 100644 server/utils/s3.js

diff --git a/client/modules/IDE/actions/project.js b/client/modules/IDE/actions/project.js
index 4cbd0102..d5451f94 100644
--- a/client/modules/IDE/actions/project.js
+++ b/client/modules/IDE/actions/project.js
@@ -1,13 +1,13 @@
 import { browserHistory } from 'react-router';
 import axios from 'axios';
 import objectID from 'bson-objectid';
+import each from 'async/each';
 import * as ActionTypes from '../../../constants';
 import { showToast, setToastText } from './toast';
 import { setUnsavedChanges,
   justOpenedProject,
   resetJustOpenedProject,
   showErrorModal } from './ide';
-import each from 'async/each';
 
 const ROOT_URL = process.env.API_URL;
 
@@ -186,18 +186,20 @@ export function cloneProject() {
       return { ...file };
     });
 
+    // generate new IDs for all files
     const rootFile = newFiles.find(file => file.name === 'root');
     const newRootFileId = objectID().toHexString();
     rootFile.id = newRootFileId;
     rootFile._id = newRootFileId;
     generateNewIdsForChildren(rootFile, newFiles);
-    //need to duplicate all files hosted on S3
+
+    // duplicate all files hosted on S3
     each(newFiles, (file, callback) => {
       if (file.url) {
         const formParams = {
           url: file.url
         };
-        axios.post(`${ROOT_URL}/S3/copy`, formParams, {withCredentials: true})
+        axios.post(`${ROOT_URL}/S3/copy`, formParams, { withCredentials: true })
           .then((response) => {
             file.url = response.data.url;
             callback(null);
diff --git a/server/controllers/aws.controller.js b/server/controllers/aws.controller.js
index 138ca51d..c1936a04 100644
--- a/server/controllers/aws.controller.js
+++ b/server/controllers/aws.controller.js
@@ -51,10 +51,10 @@ export function copyObjectInS3(req, res) {
   const params = {
     Bucket: `${process.env.S3_BUCKET}`,
     CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
-    key: newFilename
+    Key: newFilename
   };
   const copy = client.copyObject(params);
-  del.on('end', function() {
-    res.json({url: `${s3Bucket}/${newFilename}`});
+  copy.on('end', function() {
+    res.json({url: `${s3Bucket}${newFilename}`});
   });
 }
diff --git a/server/utils/s3.js b/server/utils/s3.js
deleted file mode 100644
index 16fda808..00000000
--- a/server/utils/s3.js
+++ /dev/null
@@ -1,18 +0,0 @@
-import s3 from 's3';
-
-const client = s3.createClient({
-  maxAsyncS3: 20,
-  s3RetryCount: 3,
-  s3RetryDelay: 1000,
-  multipartUploadThreshold: 20971520, // this is the default (20 MB)
-  multipartUploadSize: 15728640, // this is the default (15 MB)
-  s3Options: {
-    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
-    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
-  },
-});
-
-export function copyObjectOnS3(url, callback) {
-
-}
-
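
Note: the second hunk in client/modules/IDE/actions/project.js ends inside the each() iteratee, so the error path and the final callback that fires once every S3 copy has returned are not visible above. Below is a minimal sketch of how that call plausibly completes, using the async/each signature each(collection, iteratee, finalCallback); copyProjectFiles(), onSaved(), and onError() are hypothetical names introduced here for illustration, not code from the patch.

    // Sketch only, not the patch's actual code. Mirrors the imports already
    // present in project.js; the helper and its callbacks are hypothetical.
    import axios from 'axios';
    import each from 'async/each';

    const ROOT_URL = process.env.API_URL;

    function copyProjectFiles(newFiles, onSaved, onError) {
      each(newFiles, (file, callback) => {
        if (!file.url) {
          callback(null); // nothing hosted on S3 for this file
          return;
        }
        const formParams = { url: file.url };
        axios.post(`${ROOT_URL}/S3/copy`, formParams, { withCredentials: true })
          .then((response) => {
            file.url = response.data.url; // point the clone at the copied object
            callback(null);
          })
          .catch(callback); // surface a failed copy to each()
      }, (err) => {
        // final callback: runs once, after every iteratee has called back
        if (err) {
          onError(err);
          return;
        }
        onSaved(newFiles); // all copies succeeded; safe to save the clone
      });
    }

The per-file callbacks give each() a join point: the cloned project must not be persisted until every hosted file has a new URL from /S3/copy, which is presumably why this commit adds the async/each import.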