duplicating files works
parent f01a58353b
commit fa04054d28
3 changed files with 8 additions and 24 deletions
@@ -1,13 +1,13 @@
import { browserHistory } from 'react-router';
import axios from 'axios';
import objectID from 'bson-objectid';
import each from 'async/each';
import * as ActionTypes from '../../../constants';
import { showToast, setToastText } from './toast';
import { setUnsavedChanges,
  justOpenedProject,
  resetJustOpenedProject,
  showErrorModal } from './ide';
import each from 'async/each';

const ROOT_URL = process.env.API_URL;
@@ -186,12 +186,14 @@ export function cloneProject() {
      return { ...file };
    });

    // generate new IDS for all files
    const rootFile = newFiles.find(file => file.name === 'root');
    const newRootFileId = objectID().toHexString();
    rootFile.id = newRootFileId;
    rootFile._id = newRootFileId;
    generateNewIdsForChildren(rootFile, newFiles);
    //need to duplicate all files hosted on S3

    // duplicate all files hosted on S3
    each(newFiles, (file, callback) => {
      if (file.url) {
        const formParams = {
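Note: the hunk above is cut off at `const formParams = {`, and the `generateNewIdsForChildren` helper it calls is not shown in this diff. Below is a minimal sketch of how those two pieces might look, reusing the imports from the first hunk; the helper body, the `/S3/copy` endpoint, the `url` form field, and the `withCredentials` flag are assumptions, not part of this commit.

// sketch only -- not part of this commit
import axios from 'axios';
import objectID from 'bson-objectid';
import each from 'async/each';

const ROOT_URL = process.env.API_URL;

// assumed shape of the recursive helper called in the hunk above:
// give every child file a fresh id and keep the parent's children array in sync
function generateNewIdsForChildren(file, files) {
  const newChildren = [];
  (file.children || []).forEach((childId) => {
    const child = files.find(childFile => childFile.id === childId);
    const newId = objectID().toHexString();
    child.id = newId;
    child._id = newId;
    newChildren.push(newId);
    generateNewIdsForChildren(child, files); // recurse into nested folders
  });
  file.children = newChildren;
}

// assumed continuation of the truncated each() call: copy every S3-hosted
// asset to a new key, point the cloned file at the returned URL, then
// signal the iterator so the final callback fires once all files are done
function duplicateS3Assets(newFiles, done) {
  each(newFiles, (file, callback) => {
    if (!file.url) {
      callback(null);
      return;
    }
    const formParams = { url: file.url }; // assumed field name
    axios.post(`${ROOT_URL}/S3/copy`, formParams, { withCredentials: true }) // assumed endpoint
      .then((response) => {
        file.url = response.data.url;
        callback(null);
      })
      .catch(err => callback(err));
  }, done);
}

The final done(err) callback is the natural place to save the cloned project once every asset has been copied.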
@@ -51,10 +51,10 @@ export function copyObjectInS3(req, res) {
   const params = {
     Bucket: `${process.env.S3_BUCKET}`,
     CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
-    key: newFilename
+    Key: newFilename
   };
   const copy = client.copyObject(params);
-  del.on('end', function() {
-    res.json({url: `${s3Bucket}/${newFilename}`});
+  copy.on('end', function() {
+    res.json({url: `${s3Bucket}${newFilename}`});
   });
 }
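Note: pulling the fixes above together, the corrected handler might read roughly as below. Only the params object (with the capitalized Key), the client.copyObject(params) call, and the copy.on('end') response come from this diff; the key derivation, the error handler, and the module-level client / s3Bucket values (an s3.createClient instance mirroring the deleted module shown in the next hunk, plus a public bucket URL ending in a slash so that `${s3Bucket}${newFilename}` joins cleanly) are assumptions.

// sketch only -- not part of this commit
import s3 from 's3';
import objectID from 'bson-objectid';

// assumed module-level setup, mirroring the deleted s3.createClient config below
const client = s3.createClient({
  s3Options: {
    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
  },
});
const s3Bucket = `https://s3.amazonaws.com/${process.env.S3_BUCKET}/`; // assumed, note trailing slash

export function copyObjectInS3(req, res) {
  const { url } = req.body;               // assumed request shape
  const objectKey = url.split('/').pop(); // assumed: reuse the existing object's key
  const fileExtension = objectKey.split('.').pop();
  const newFilename = `${objectID().toHexString()}.${fileExtension}`;

  const params = {
    Bucket: `${process.env.S3_BUCKET}`,
    CopySource: `${process.env.S3_BUCKET}/${objectKey}`,
    Key: newFilename
  };

  const copy = client.copyObject(params);
  copy.on('error', (err) => {
    res.status(500).json({ error: err.message }); // guard not present in the diff
  });
  copy.on('end', () => {
    res.json({ url: `${s3Bucket}${newFilename}` });
  });
}

Registering the error handler before waiting on 'end' keeps a failed copy from leaving the request hanging.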
@@ -1,18 +0,0 @@
import s3 from 's3';

const client = s3.createClient({
  maxAsyncS3: 20,
  s3RetryCount: 3,
  s3RetryDelay: 1000,
  multipartUploadThreshold: 20971520, // this is the default (20 MB)
  multipartUploadSize: 15728640, // this is the default (15 MB)
  s3Options: {
    accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
    secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
  },
});

export function copyObjectOnS3(url, callback) {

}