As per @Reinsbrain's request, this is the Node.js version of implementing client-side uploads to S3 with "public-read" rights.
BACKEND (NODE.JS)
var AWS = require('aws-sdk');
var moment = require('moment');
var crypto = require('crypto');

var AWS_ACCESS_KEY_ID = process.env.S3_ACCESS_KEY;
var AWS_SECRET_ACCESS_KEY = process.env.S3_SECRET;
var S3_BUCKET = process.env.S3_BUCKET;
var S3_DOMAIN = process.env.S3_DOMAIN;
var POLICY_EXPIRATION_TIME = 10; // policy lifetime in minutes

AWS.config.update({accessKeyId: AWS_ACCESS_KEY_ID, secretAccessKey: AWS_SECRET_ACCESS_KEY});
var s3 = new AWS.S3();

exports.writePolicy = function (filePath, contentType, maxSize, redirect, callback) {
  var readType = "public-read";
  // OPTIONAL: set your own expiry if you don't want the default 15 minutes
  var expiration = moment().add(POLICY_EXPIRATION_TIME, 'minutes').toISOString();

  var s3Policy = {
    "expiration": expiration,
    "conditions": [
      ["starts-with", "$key", filePath],
      {"bucket": S3_BUCKET},
      {"acl": readType},
      ["content-length-range", 2048, maxSize], // min 2 kB up to maxSize
      {"redirect": redirect},
      ["starts-with", "$Content-Type", contentType]
    ]
  };

  // stringify and base64-encode the policy
  var stringPolicy = JSON.stringify(s3Policy);
  var base64Policy = Buffer.from(stringPolicy, "utf-8").toString("base64");

  // sign the base64-encoded policy with the secret key (HMAC-SHA1)
  var signature = crypto.createHmac("sha1", AWS_SECRET_ACCESS_KEY)
    .update(base64Policy).digest("base64");

  // build the credentials object to hand back to the caller
  var credentials = {
    url: S3_DOMAIN,
    key: filePath,
    AWSAccessKeyId: AWS_ACCESS_KEY_ID,
    acl: readType,
    policy: base64Policy,
    signature: signature,
    redirect: redirect,
    content_type: contentType,
    expiration: expiration
  };
  callback(null, credentials);
};
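To give an idea of how you'd expose this to the browser, here is a minimal sketch of an Express route that calls writePolicy and returns the credentials as JSON. The route path, query parameter names, the uploads/ key prefix, and the ./policy module name are all hypothetical; adjust them to your app.

var express = require('express');
var policy = require('./policy'); // hypothetical: the module exporting writePolicy above

var app = express();

app.get('/s3-credentials', function (req, res) {
  var filePath = 'uploads/' + req.query.filename; // hypothetical key layout
  var contentType = req.query.contentType || 'image/jpeg';
  var maxSize = 10485760; // 10 MB cap, adjust to taste
  var redirect = ''; // or a success_action_redirect URL

  policy.writePolicy(filePath, contentType, maxSize, redirect, function (err, credentials) {
    if (err) return res.status(500).send('could not sign policy');
    res.json(credentials); // the frontend drops these into its hidden inputs
  });
});

app.listen(3000);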
FRONTEND (assuming the values from the server are in input fields and that you're submitting images via a form POST, since I couldn't get PUT to work):
// convert a base64 data URI into a Blob that FormData can carry
function dataURItoBlob(dataURI, contentType) {
  var binary = atob(dataURI.split(',')[1]);
  var array = [];
  for (var i = 0; i < binary.length; i++) {
    array.push(binary.charCodeAt(i));
  }
  return new Blob([new Uint8Array(array)], {type: contentType});
}
function submitS3(callback) {
  var base64Data = $("#file").val(); // your file to upload, e.g. img.toDataURL("image/jpeg")
  var contentType = $("#contentType").val();
  var blobData = dataURItoBlob(base64Data, contentType);

  // S3 requires the policy fields before the file; "file" must come last
  var fd = new FormData();
  fd.append('key', $("#key").val());
  fd.append('acl', $("#acl").val());
  fd.append('Content-Type', contentType);
  fd.append('AWSAccessKeyId', $("#accessKeyId").val());
  fd.append('policy', $("#policy").val());
  fd.append('signature', $("#signature").val());
  fd.append('redirect', $("#redirect").val());
  fd.append('file', blobData);

  var xmlhttp = new XMLHttpRequest();
  xmlhttp.onreadystatechange = function () {
    if (xmlhttp.readyState === 4) {
      // do whatever you want on completion
      callback();
    }
  };

  var someBucket = "your_bucket_name";
  var S3_DOMAIN = "https://" + someBucket + ".s3.amazonaws.com/";
  xmlhttp.open('POST', S3_DOMAIN, true);
  xmlhttp.send(fd);
}
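For completeness, here is a sketch of wiring the two halves together: fetch the signed credentials, drop them into the hidden inputs that submitS3 reads, then submit. It assumes jQuery and a /s3-credentials endpoint like the one sketched above (both hypothetical).

function uploadFile(callback) {
  $.getJSON('/s3-credentials', {filename: 'photo.jpg', contentType: 'image/jpeg'},
    function (credentials) {
      // populate the hidden inputs with the server-signed values
      $('#key').val(credentials.key);
      $('#acl').val(credentials.acl);
      $('#accessKeyId').val(credentials.AWSAccessKeyId);
      $('#policy').val(credentials.policy);
      $('#signature').val(credentials.signature);
      $('#redirect').val(credentials.redirect);
      $('#contentType').val(credentials.content_type);
      submitS3(callback);
    });
}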
Note: I was uploading more than one image per submission, so I added multiple iframes (each running the FRONTEND code above) to do simultaneous multi-image uploads; a parallel-XHR alternative is sketched below.
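Since the frontend above already uses XHR, an alternative to iframes is simply firing the uploads in parallel and counting completions. This is just a sketch; uploadOne is a hypothetical wrapper around the submitS3 flow for a single image.

function uploadAll(images, done) {
  var remaining = images.length;
  if (remaining === 0) return done();
  images.forEach(function (img) {
    uploadOne(img, function () {
      remaining -= 1;
      if (remaining === 0) done(); // all uploads finished
    });
  });
}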