Initial commit for cloning files on S3 (untested)

This commit is contained in:
Cassie Tarakajian 2017-03-23 14:23:54 -04:00
parent 8e82fe96c3
commit f01a58353b
5 changed files with 85 additions and 18 deletions

View file

@ -7,6 +7,7 @@ import { setUnsavedChanges,
justOpenedProject, justOpenedProject,
resetJustOpenedProject, resetJustOpenedProject,
showErrorModal } from './ide'; showErrorModal } from './ide';
import each from 'async/each';
const ROOT_URL = process.env.API_URL; const ROOT_URL = process.env.API_URL;
@ -190,23 +191,39 @@ export function cloneProject() {
rootFile.id = newRootFileId; rootFile.id = newRootFileId;
rootFile._id = newRootFileId; rootFile._id = newRootFileId;
generateNewIdsForChildren(rootFile, newFiles); generateNewIdsForChildren(rootFile, newFiles);
// const newFiles = state.files; //need to duplicate all files hosted on S3
const formParams = Object.assign({}, { name: `${state.project.name} copy` }, { files: newFiles }); each(newFiles, (file, callback) => {
axios.post(`${ROOT_URL}/projects`, formParams, { withCredentials: true }) if (file.url) {
.then((response) => { const formParams = {
browserHistory.push(`/${response.data.user.username}/sketches/${response.data.id}`); url: file.url
console.log(response.data); };
dispatch({ axios.post(`${ROOT_URL}/S3/copy`, formParams, {withCredentials: true})
type: ActionTypes.NEW_PROJECT, .then((response) => {
project: response.data, file.url = response.data.url;
owner: response.data.user, callback(null);
files: response.data.files });
}); } else {
}) callback(null);
.catch(response => dispatch({ }
type: ActionTypes.PROJECT_SAVE_FAIL, }, (err) => {
error: response.data // if there were no errors duplicating the files on S3, then duplicate the project
})); const formParams = Object.assign({}, { name: `${state.project.name} copy` }, { files: newFiles });
axios.post(`${ROOT_URL}/projects`, formParams, { withCredentials: true })
.then((response) => {
browserHistory.push(`/${response.data.user.username}/sketches/${response.data.id}`);
console.log(response.data);
dispatch({
type: ActionTypes.NEW_PROJECT,
project: response.data,
owner: response.data.user,
files: response.data.files
});
})
.catch(response => dispatch({
type: ActionTypes.PROJECT_SAVE_FAIL,
error: response.data
}));
});
}; };
} }

View file

@ -108,6 +108,7 @@
"redux-thunk": "^2.1.0", "redux-thunk": "^2.1.0",
"request": "^2.76.0", "request": "^2.76.0",
"request-promise": "^4.1.1", "request-promise": "^4.1.1",
"s3": "^4.4.0",
"s3-policy": "^0.2.0", "s3-policy": "^0.2.0",
"shortid": "^2.2.6", "shortid": "^2.2.6",
"srcdoc-polyfill": "^0.2.0", "srcdoc-polyfill": "^0.2.0",

View file

@ -1,5 +1,20 @@
import uuid from 'node-uuid'; import uuid from 'node-uuid';
import policy from 's3-policy'; import policy from 's3-policy';
import s3 from 's3';
const client = s3.createClient({
maxAsyncS3: 20,
s3RetryCount: 3,
s3RetryDelay: 1000,
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
},
});
const s3Bucket = `https://s3-us-west-2.amazonaws.com/${process.env.S3_BUCKET}/`;
function getExtension(filename) { function getExtension(filename) {
const i = filename.lastIndexOf('.'); const i = filename.lastIndexOf('.');
@ -27,4 +42,19 @@ export function signS3(req, res) {
return res.json(result); return res.json(result);
} }
export default signS3; export function copyObjectInS3(req, res) {
const url = req.body.url;
const objectKey = url.split("/").pop();
const fileExtension = getExtension(objectKey);
const newFilename = uuid.v4() + fileExtension;
const params = {
Bucket: `${process.env.S3_BUCKET}`,
CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
key: newFilename
};
const copy = client.copyObject(params);
del.on('end', function() {
res.json({url: `${s3Bucket}/${newFilename}`});
});
}

View file

@ -4,5 +4,6 @@ import * as AWSController from '../controllers/aws.controller';
const router = new Router(); const router = new Router();
router.route('/S3/sign').post(AWSController.signS3); router.route('/S3/sign').post(AWSController.signS3);
router.route('/S3/copy').post(AWSController.copyObjectInS3);
export default router; export default router;

18
server/utils/s3.js Normal file
View file

@ -0,0 +1,18 @@
import s3 from 's3';
const client = s3.createClient({
maxAsyncS3: 20,
s3RetryCount: 3,
s3RetryDelay: 1000,
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
},
});
export function copyObjectOnS3(url, callback) {
}