make initial changes for #169--need to migrate the locations of a user's s3 files to be namespaced under their userid

Cassie Tarakajian 2017-04-06 14:34:14 -04:00
parent 23560c7879
commit aaa5e868e2
10 changed files with 115 additions and 38 deletions
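In short, every S3 object a user uploads moves from the bucket root to a key prefixed with that user's id. An illustrative sketch of the before/after key shape (not part of the commit; the bucket name and ids below are invented):

// Previously an upload lived at the bucket root under its generated filename;
// after this change the key is namespaced under the owning user's id.
const bucket = 'example-bucket';        // hypothetical bucket name
const userId = 'someMongoUserId';       // hypothetical owner id
const filename = 'd2f9b7c0.png';        // hypothetical generated filename

const oldKey = filename;                // what existing records still point at
const newKey = `${userId}/${filename}`; // what signS3 now produces

// Resulting URL shape (region and format taken from the migration script below):
console.log(`https://s3-us-west-2.amazonaws.com/${bucket}/${oldKey}`);
console.log(`https://s3-us-west-2.amazonaws.com/${bucket}/${newKey}`);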

View file

@@ -30,7 +30,7 @@ function localIntercept(file, options = {}) {
});
}
export function dropzoneAcceptCallback(file, done) {
export function dropzoneAcceptCallback(userId, file, done) {
return () => {
// for text files and small files
// check mime type
@@ -50,6 +50,7 @@ export function dropzoneAcceptCallback(file, done) {
name: file.name,
type: file.type,
size: file.size,
userId
// _csrf: document.getElementById('__createPostToken').value
},
{

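The bare `userId` added to the request body above is ES6 shorthand for `userId: userId`, so the signing request now carries the uploader's id alongside the file metadata. A minimal standalone sketch (illustrative only; `buildSigningRequest` is not a function in the codebase):

function buildSigningRequest(userId, file) {
  return {
    name: file.name,
    type: file.type,
    size: file.size,
    userId // shorthand property, equivalent to `userId: userId`
  };
}

// e.g. { name: 'cat.png', type: 'image/png', size: 1024, userId: 'someMongoUserId' }
console.log(buildSigningRequest('someMongoUserId', { name: 'cat.png', type: 'image/png', size: 1024 }));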
View file

@@ -30,9 +30,9 @@ class FileUploader extends React.Component {
text/plain,text/csv,.obj,video/webm,video/ogg,video/quicktime,video/mp4,
.otf,.ttf`,
dictDefaultMessage: 'Drop files here to upload or click to use the file browser',
accept: this.props.dropzoneAcceptCallback,
accept: this.props.dropzoneAcceptCallback.bind(this, this.props.project.owner.id),
sending: this.props.dropzoneSendingCallback,
complete: this.props.dropzoneCompleteCallback,
complete: this.props.dropzoneCompleteCallback
// error: (file, errorMessage) => {
// console.log(file);
// console.log(errorMessage);
@@ -50,7 +50,21 @@ class FileUploader extends React.Component {
FileUploader.propTypes = {
dropzoneAcceptCallback: PropTypes.func.isRequired,
dropzoneSendingCallback: PropTypes.func.isRequired,
dropzoneCompleteCallback: PropTypes.func.isRequired
dropzoneCompleteCallback: PropTypes.func.isRequired,
project: PropTypes.shape({
owner: PropTypes.shape({
id: PropTypes.string.isRequired
})
})
};
FileUploader.defaultProps = {
project: {
id: undefined,
owner: {
id: undefined
}
}
};
function mapStateToProps(state) {

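The `.bind(this, this.props.project.owner.id)` call above partially applies the project owner's id, so when Dropzone later invokes `accept(file, done)` the action actually receives `(userId, file, done)`. A standalone illustration of that partial application (values invented):

function dropzoneAcceptCallback(userId, file, done) {
  console.log(`signing ${file.name} on behalf of user ${userId}`);
  done();
}

// bind() pre-fills the first argument; Dropzone still calls accept(file, done).
const accept = dropzoneAcceptCallback.bind(null, 'someMongoUserId');
accept({ name: 'sketch.js' }, () => console.log('accepted'));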
View file

@@ -22,7 +22,7 @@ function getExtension(filename) {
}
export function deleteObjectsFromS3(keyList, callback) {
const keys = keyList.map((key) => { return {Key: key}; });
const keys = keyList.map((key) => { return { Key: key }; }); // eslint-disable-line
if (keyList.length > 0) {
const params = {
Bucket: `${process.env.S3_BUCKET}`,
@@ -31,22 +31,20 @@ export function deleteObjectsFromS3(keyList, callback) {
},
};
const del = client.deleteObjects(params);
del.on('end', function() {
if(callback) {
del.on('end', () => {
if (callback) {
callback();
}
});
} else {
if(callback) {
callback();
}
} else if (callback) {
callback();
}
}
export function deleteObjectFromS3(req, res) {
const objectKey = req.params.object_key;
deleteObjectsFromS3([objectKey], function() {
res.json({ success:true });
deleteObjectsFromS3([objectKey], () => {
res.json({ success: true });
});
}
@@ -64,7 +62,7 @@ export function signS3(req, res) {
});
const result = {
AWSAccessKeyId: process.env.AWS_ACCESS_KEY,
key: filename,
key: `${req.body.userId}/${filename}`,
policy: p.policy,
signature: p.signature
};
@@ -73,7 +71,7 @@ export function signS3(req, res) {
export function copyObjectInS3(req, res) {
const url = req.body.url;
const objectKey = url.split("/").pop();
const objectKey = url.split('/').pop();
const fileExtension = getExtension(objectKey);
const newFilename = uuid.v4() + fileExtension;
@@ -83,7 +81,7 @@ export function copyObjectInS3(req, res) {
Key: newFilename
};
const copy = client.copyObject(params);
copy.on('end', function() {
res.json({url: `${s3Bucket}${newFilename}`});
copy.on('end', () => {
res.json({ url: `${s3Bucket}${newFilename}` });
});
}

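With `key` now built as `${req.body.userId}/${filename}`, the signed-upload response that `signS3` sends back points at a user-namespaced key. A hedged illustration of its shape (field names from the hunk above; values invented):

const exampleSignS3Response = {
  AWSAccessKeyId: 'AKIAEXAMPLEKEY',       // process.env.AWS_ACCESS_KEY
  key: 'someMongoUserId/d2f9b7c0.png',    // `${req.body.userId}/${filename}`
  policy: 'base64EncodedPolicyDocument',  // p.policy
  signature: 'hmacSignature'              // p.signature
};
console.log(exampleSignS3Response.key);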
View file

@@ -1,6 +1,7 @@
import Project from '../models/project';
import each from 'async/each';
import moment from 'moment';
import Project from '../models/project';
import { resolvePathToFile } from '../utils/filePath';
import { deleteObjectsFromS3 } from './aws.controller';
@@ -48,7 +49,10 @@ function deleteMany(files, ids) {
each(ids, (id, cb) => {
if (files.id(id).url) {
if (!process.env.S3_DATE || (process.env.S3_DATE && moment(process.env.S3_DATE) < moment(files.id(id).createdAt))) {
objectKeys.push(files.id(id).url.split("/").pop());
const urlComponents = files.id(id).url.split('/');
const key = urlComponents.pop();
const userId = urlComponents.pop();
objectKeys.push(`${userId}/${key}`);
}
}
files.id(id).remove();

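Because keys are now `userId/filename`, deleting by the last URL segment alone would target the wrong object; the two `pop()` calls above rebuild the full namespaced key from a file's stored URL. A standalone sketch of that parsing (the URL below is invented, but its shape follows the migration script later in this commit):

const url = 'https://s3-us-west-2.amazonaws.com/example-bucket/someMongoUserId/d2f9b7c0.png';
const urlComponents = url.split('/');
const key = urlComponents.pop();     // 'd2f9b7c0.png'
const userId = urlComponents.pop();  // 'someMongoUserId'
console.log(`${userId}/${key}`);     // 'someMongoUserId/d2f9b7c0.png'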
View file

@@ -100,7 +100,12 @@ function deleteFilesFromS3(files) {
}
return false;
})
.map((file) => file.url.split('/').pop())
.map((file) => {
const urlComponents = file.url.split('/');
const key = urlComponents.pop();
const userId = urlComponents.pop();
return `${userId}/${key}`;
})
);
}

View file

@@ -183,6 +183,17 @@ export function userExists(username, callback) {
));
}
export function saveUser(res, user) {
user.save((saveErr) => {
if (saveErr) {
res.status(500).json({ error: saveErr });
return;
}
res.json(user);
});
}
export function updateSettings(req, res) {
User.findById(req.user.id, (err, user) => {
if (err) {
@@ -198,29 +209,17 @@ export function updateSettings(req, res) {
user.username = req.body.username;
if (req.body.currentPassword) {
user.comparePassword(req.body.currentPassword, (err, isMatch) => {
if (err) throw err;
user.comparePassword(req.body.currentPassword, (passwordErr, isMatch) => {
if (passwordErr) throw passwordErr;
if (!isMatch) {
res.status(401).json({ error: 'Current password is invalid.' });
return;
} else {
user.password = req.body.newPassword;
saveUser(res, user);
}
user.password = req.body.newPassword;
saveUser(res, user);
});
} else {
saveUser(res, user);
}
});
}
export function saveUser(res, user) {
user.save((saveErr) => {
if (saveErr) {
res.status(500).json({ error: saveErr });
return;
}
res.json(user);
});
}

View file

@@ -0,0 +1,53 @@
/* eslint-disable */
import s3 from 's3';
import path from 'path';
import mongoose from 'mongoose';
import User from '../models/user';
import Project from '../models/project';
require('dotenv').config({path: path.resolve('.env')});
mongoose.connect('mongodb://localhost:27017/p5js-web-editor');
mongoose.connection.on('error', () => {
console.error('MongoDB Connection Error. Please make sure that MongoDB is running.');
process.exit(1);
});
let client = s3.createClient({
maxAsyncS3: 20,
s3RetryCount: 3,
s3RetryDelay: 1000,
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: `${process.env.AWS_ACCESS_KEY}`,
secretAccessKey: `${process.env.AWS_SECRET_KEY}`,
region: 'us-west-2'
},
});
Project.find({}, (err, projects) => {
projects.forEach((project, projectIndex) => {
if (!project.user) return;
const userId = project.user.valueOf();
project.files.forEach((file, fileIndex) => {
if (file.url && file.url.includes(process.env.S3_BUCKET)) {
const key = file.url.split('/').pop();
console.log(key);
const params = {
Bucket: `${process.env.S3_BUCKET}`,
CopySource: `${process.env.S3_BUCKET}/${key}`,
Key: `${userId}/${key}`
};
client.moveObject(params)
.on('err', (err) => {
console.log(err);
})
.on('end', () => {
file.url = `https://s3-us-west-2.amazonaws.com/${process.env.S3_BUCKET}/${userId}/${key}`;
project.save((err, savedProject) => {
console.log(`updated file ${key}`);
});
});
}
});
});
});

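One caveat about the migration script: its failure handler listens for an 'err' event, while every other handler in this commit listens for 'end' and Node emitters conventionally signal failure with 'error', so copy failures here would likely go unnoticed. A hedged drop-in for the inner move (reusing `client`, `params`, `file`, `project`, `userId`, and `key` from the script above; assumes the s3 client emits 'error' like its other operations):

const mover = client.moveObject(params);
mover.on('error', (moveErr) => {
  console.log(`failed to move ${key}:`, moveErr);
});
mover.on('end', () => {
  file.url = `https://s3-us-west-2.amazonaws.com/${process.env.S3_BUCKET}/${userId}/${key}`;
  project.save((saveErr) => {
    if (saveErr) console.log(saveErr);
    else console.log(`updated file ${key}`);
  });
});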
View file

@@ -0,0 +1,3 @@
require('babel-register');
require('babel-polyfill');
require('./s3UnderUser');