duplicating files works
parent f01a58353b
commit fa04054d28

3 changed files with 8 additions and 24 deletions
@@ -1,13 +1,13 @@
 import { browserHistory } from 'react-router';
 import axios from 'axios';
 import objectID from 'bson-objectid';
+import each from 'async/each';
 import * as ActionTypes from '../../../constants';
 import { showToast, setToastText } from './toast';
 import { setUnsavedChanges,
   justOpenedProject,
   resetJustOpenedProject,
   showErrorModal } from './ide';
-import each from 'async/each';

 const ROOT_URL = process.env.API_URL;

@@ -186,18 +186,20 @@ export function cloneProject() {
     return { ...file };
   });

+  // generate new IDS for all files
   const rootFile = newFiles.find(file => file.name === 'root');
   const newRootFileId = objectID().toHexString();
   rootFile.id = newRootFileId;
   rootFile._id = newRootFileId;
   generateNewIdsForChildren(rootFile, newFiles);
-  //need to duplicate all files hosted on S3
+
+  // duplicate all files hosted on S3
   each(newFiles, (file, callback) => {
     if (file.url) {
       const formParams = {
         url: file.url
       };
-      axios.post(`${ROOT_URL}/S3/copy`, formParams, {withCredentials: true})
+      axios.post(`${ROOT_URL}/S3/copy`, formParams, { withCredentials: true })
         .then((response) => {
           file.url = response.data.url;
           callback(null);
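The cloneProject() hunk calls generateNewIdsForChildren, which is not shown in this diff. A minimal sketch of what that helper presumably does, assuming each file object tracks its children as an array of ids (the helper name and the id/_id pairing come from the hunk above; the children field layout is an assumption):

// Hypothetical sketch, not the commit's actual implementation:
// walk the file tree and give every descendant a fresh ObjectId,
// keeping id and _id in sync the same way the hunk does for root.
function generateNewIdsForChildren(file, files) {
  const newChildren = [];
  file.children.forEach((childId) => {
    const child = files.find(f => f.id === childId);
    const newId = objectID().toHexString();
    child.id = newId;
    child._id = newId;
    newChildren.push(newId);
    generateNewIdsForChildren(child, files); // recurse into folders
  });
  file.children = newChildren;
}

The hunk ends inside the async each loop; presumably its completion callback (not shown) dispatches the cloned project once every S3 copy has resolved.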
@@ -51,10 +51,10 @@ export function copyObjectInS3(req, res) {
   const params = {
     Bucket: `${process.env.S3_BUCKET}`,
     CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
-    key: newFilename
+    Key: newFilename
   };
   const copy = client.copyObject(params);
-  del.on('end', function() {
-    res.json({url: `${s3Bucket}/${newFilename}`});
+  copy.on('end', function() {
+    res.json({url: `${s3Bucket}${newFilename}`});
   });
 }
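This hunk fixes two bugs in copyObjectInS3. First, AWS S3 parameter names are case-sensitive, so the lowercase key was never picked up alongside Bucket and CopySource; it must be Key. Second, the 'end' listener was attached to a stale del variable instead of the copy emitter returned by client.copyObject. A minimal sketch of the corrected flow, with an 'error' handler added as a suggestion (the s3 package's copyObject returns an event emitter that emits 'error' and 'end'):

// client, objectKey, newFilename, and s3Bucket are defined earlier
// in the controller (not shown in this diff).
const params = {
  Bucket: `${process.env.S3_BUCKET}`,
  CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
  Key: newFilename // capitalized: S3 params are case-sensitive
};
const copy = client.copyObject(params);
copy.on('error', (err) => {
  res.status(500).json({ error: err.message }); // suggested addition, not in the commit
});
copy.on('end', () => {
  res.json({ url: `${s3Bucket}${newFilename}` });
});

The hunk also drops the slash in the response URL, which suggests s3Bucket already ends with a trailing slash.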
@@ -1,18 +0,0 @@
-import s3 from 's3';
-
-const client = s3.createClient({
-  maxAsyncS3: 20,
-  s3RetryCount: 3,
-  s3RetryDelay: 1000,
-  multipartUploadThreshold: 20971520, // this is the default (20 MB)
-  multipartUploadSize: 15728640, // this is the default (15 MB)
-  s3Options: {
-    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
-    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
-  },
-});
-
-export function copyObjectOnS3(url, callback) {
-
-}
-
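The deleted module was effectively dead code: its exported copyObjectOnS3 was an empty stub, and the working implementation is copyObjectInS3 in the controller above. That controller presumably creates its own client the same way, along these lines (this mirrors the deleted config; the surviving file's exact options are not shown in this diff):

import s3 from 's3';

const client = s3.createClient({
  s3Options: {
    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
  },
});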