initial commit to cloning files on S3, untested
parent 8e82fe96c3, commit f01a58353b
5 changed files with 85 additions and 18 deletions
@@ -7,6 +7,7 @@ import { setUnsavedChanges,
  justOpenedProject,
  resetJustOpenedProject,
  showErrorModal } from './ide';
+import each from 'async/each';

const ROOT_URL = process.env.API_URL;
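each from the async package (imported above) applies an iterator to every item in parallel and invokes a single final callback once every iterator has called back, or as soon as one of them passes an error. A minimal usage sketch, separate from the project code, to show the shape of that API:

import each from 'async/each';

each(['a.png', 'b.png'], (name, callback) => {
  console.log(`processing ${name}`); // stand-in for the per-file request used below
  callback(null); // signal this item is done; pass an Error instead to abort the run
}, (err) => {
  if (err) {
    console.error('a file failed:', err);
    return;
  }
  console.log('all files processed');
});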
@@ -190,23 +191,39 @@ export function cloneProject() {
    rootFile.id = newRootFileId;
    rootFile._id = newRootFileId;
    generateNewIdsForChildren(rootFile, newFiles);
    // const newFiles = state.files;
-    const formParams = Object.assign({}, { name: `${state.project.name} copy` }, { files: newFiles });
-    axios.post(`${ROOT_URL}/projects`, formParams, { withCredentials: true })
-      .then((response) => {
-        browserHistory.push(`/${response.data.user.username}/sketches/${response.data.id}`);
-        console.log(response.data);
-        dispatch({
-          type: ActionTypes.NEW_PROJECT,
-          project: response.data,
-          owner: response.data.user,
-          files: response.data.files
-        });
-      })
-      .catch(response => dispatch({
-        type: ActionTypes.PROJECT_SAVE_FAIL,
-        error: response.data
-      }));
+    // need to duplicate all files hosted on S3
+    each(newFiles, (file, callback) => {
+      if (file.url) {
+        const formParams = {
+          url: file.url
+        };
+        axios.post(`${ROOT_URL}/S3/copy`, formParams, { withCredentials: true })
+          .then((response) => {
+            file.url = response.data.url;
+            callback(null);
+          });
+      } else {
+        callback(null);
+      }
+    }, (err) => {
+      // if there were no errors duplicating the files on S3, then duplicate the project itself
+      const formParams = Object.assign({}, { name: `${state.project.name} copy` }, { files: newFiles });
+      axios.post(`${ROOT_URL}/projects`, formParams, { withCredentials: true })
+        .then((response) => {
+          browserHistory.push(`/${response.data.user.username}/sketches/${response.data.id}`);
+          console.log(response.data);
+          dispatch({
+            type: ActionTypes.NEW_PROJECT,
+            project: response.data,
+            owner: response.data.user,
+            files: response.data.files
+          });
+        })
+        .catch(response => dispatch({
+          type: ActionTypes.PROJECT_SAVE_FAIL,
+          error: response.data
+        }));
+    });
  };
}
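A gap worth flagging in the new iterator: the POST to /S3/copy has no .catch, so a failed copy never invokes callback, each never reaches its final callback, and err is never checked before the project is created. A minimal sketch of a per-file iterator that surfaces the failure instead; copyFileObject is a hypothetical helper name, and axios and ROOT_URL are the ones already imported in this module:

// Hypothetical helper, not part of this commit: copies one file's S3 object
// and always calls back, passing any request error through to `each`.
function copyFileObject(file, callback) {
  if (!file.url) {
    callback(null);
    return;
  }
  axios.post(`${ROOT_URL}/S3/copy`, { url: file.url }, { withCredentials: true })
    .then((response) => {
      file.url = response.data.url;
      callback(null);
    })
    .catch(error => callback(error));
}

With that in place, the completion callback can stop on err (for example by dispatching PROJECT_SAVE_FAIL) instead of posting a half-cloned project.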
@@ -108,6 +108,7 @@
    "redux-thunk": "^2.1.0",
    "request": "^2.76.0",
    "request-promise": "^4.1.1",
+   "s3": "^4.4.0",
    "s3-policy": "^0.2.0",
    "shortid": "^2.2.6",
    "srcdoc-polyfill": "^0.2.0",
@@ -1,5 +1,20 @@
import uuid from 'node-uuid';
import policy from 's3-policy';
+import s3 from 's3';
+
+const client = s3.createClient({
+  maxAsyncS3: 20,
+  s3RetryCount: 3,
+  s3RetryDelay: 1000,
+  multipartUploadThreshold: 20971520, // this is the default (20 MB)
+  multipartUploadSize: 15728640, // this is the default (15 MB)
+  s3Options: {
+    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
+    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
+  },
+});
+
+const s3Bucket = `https://s3-us-west-2.amazonaws.com/${process.env.S3_BUCKET}/`;

function getExtension(filename) {
  const i = filename.lastIndexOf('.');
@@ -27,4 +42,19 @@ export function signS3(req, res) {
  return res.json(result);
}

export default signS3;
+
+// Duplicates an existing S3 object under a new random key and returns its URL.
+export function copyObjectInS3(req, res) {
+  const url = req.body.url;
+  const objectKey = url.split('/').pop();
+  const fileExtension = getExtension(objectKey);
+  const newFilename = uuid.v4() + fileExtension;
+  const params = {
+    Bucket: `${process.env.S3_BUCKET}`,
+    CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
+    Key: newFilename
+  };
+  const copy = client.copyObject(params);
+  copy.on('end', () => {
+    res.json({ url: `${s3Bucket}${newFilename}` });
+  });
+}
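client.copyObject returns an EventEmitter, and the handler above only listens for 'end', so a failed copy leaves the HTTP request hanging. A hedged sketch of the same tail with an 'error' listener added; the status code and error response shape are assumptions, not part of the commit:

const copy = client.copyObject(params);
copy.on('error', (err) => {
  // assumption: report the failure instead of leaving the request open
  res.status(500).json({ error: err.message });
});
copy.on('end', () => {
  res.json({ url: `${s3Bucket}${newFilename}` });
});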
@@ -4,5 +4,6 @@ import * as AWSController from '../controllers/aws.controller';
const router = new Router();

router.route('/S3/sign').post(AWSController.signS3);
+router.route('/S3/copy').post(AWSController.copyObjectInS3);

export default router;
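For reference, the request and response shape the new /S3/copy endpoint expects is small enough to show inline; the field names come from the controller above, while the bucket and object name in the example URL are made up:

// POST /S3/copy   body:     { url: '<URL of an existing S3 object>' }
//                 response: { url: '<URL of the duplicated object>' }
axios.post(`${ROOT_URL}/S3/copy`,
  { url: 'https://s3-us-west-2.amazonaws.com/example-bucket/1234.png' },
  { withCredentials: true })
  .then(response => console.log(response.data.url));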
server/utils/s3.js (new file, 18 lines)
@@ -0,0 +1,18 @@
+import s3 from 's3';
+
+const client = s3.createClient({
+  maxAsyncS3: 20,
+  s3RetryCount: 3,
+  s3RetryDelay: 1000,
+  multipartUploadThreshold: 20971520, // this is the default (20 MB)
+  multipartUploadSize: 15728640, // this is the default (15 MB)
+  s3Options: {
+    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
+    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
+  },
+});
+
+export function copyObjectOnS3(url, callback) {
+
+}
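copyObjectOnS3 is left as an empty stub in this commit (the controller performs the copy inline instead). A sketch of one possible body, mirroring the controller and reusing the client defined above; the bucket URL constant and the error-first callback shape are assumptions:

import uuid from 'node-uuid';

// assumption: same public bucket URL as in aws.controller
const s3Bucket = `https://s3-us-west-2.amazonaws.com/${process.env.S3_BUCKET}/`;

// Copies the object behind `url` to a new random key in the same bucket,
// then calls back with the new object's URL.
export function copyObjectOnS3(url, callback) {
  const objectKey = url.split('/').pop();
  const dotIndex = objectKey.lastIndexOf('.');
  const fileExtension = dotIndex === -1 ? '' : objectKey.slice(dotIndex);
  const newFilename = uuid.v4() + fileExtension;
  const copy = client.copyObject({
    Bucket: `${process.env.S3_BUCKET}`,
    CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
    Key: newFilename
  });
  copy.on('error', err => callback(err));
  copy.on('end', () => callback(null, `${s3Bucket}${newFilename}`));
}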