How to upload to AWS S3 directly from browser using a pre-signed URL instead of credentials?

天命终不由人 2020-12-08 03:07

We'd like to use the JavaScript AWS SDK to upload files to S3, but without using credentials at all. Uploading with credentials works, but we cannot generate an AWS IAM user f…

7 Answers
  • 2020-12-08 03:17

    If you're not using jQuery, this is the minimum you need on the front end:

    var xhr = new XMLHttpRequest();
    xhr.open('PUT', signedUrl, true);
    xhr.setRequestHeader('Content-Type', signedUrlContentType);
    xhr.onload = () => {
      if (xhr.status === 200) {
        // success!
      }
    };
    xhr.onerror = () => {
      // error...
    };
    xhr.send(file); // `file` is a File object here 
    

    See File object docs: https://developer.mozilla.org/en-US/docs/Web/API/File

    Then you can add your upload progress as usual:

    xhr.upload.onprogress = (event) => {
      if (event.lengthComputable) {
        var percent = Math.round((event.loaded / event.total) * 100)
        console.log(percent);
      }
    };
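
    The snippets above assume you already have `signedUrl`. A minimal sketch of fetching it from your own backend first (the `/presign` endpoint and its response shape are my assumptions; your API will differ):

    fetch('/presign?filename=' + encodeURIComponent(file.name) +
          '&type=' + encodeURIComponent(file.type))
      .then(function (res) { return res.json(); })
      .then(function (data) {
        var signedUrl = data.url;                     // pre-signed PUT URL
        var signedUrlContentType = data.contentType;  // content type the URL was signed with
        // ...then run the XHR PUT shown above
      });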
    
  • 2020-12-08 03:18

    In the project I am working on right now, files are uploaded from the client directly to S3. In my case it works in a few steps:

    1. request a pre-signed form with the upload settings from the server (it is signed on the server, because I cannot pass access keys to the client, and I also need to apply some restrictions to the upload)
    2. upload the file to S3 using XHR2 (for old browsers you can fall back to a hidden-iframe hack or browser plugins such as Flash)

    A sketch of these two steps is shown below; the main code parts are in this gist: https://gist.github.com/zxbodya/3cdabd9172bcc89f8ac5
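
    Rough client-side sketch of the two steps (the /s3-presigned-post endpoint and its response shape are assumptions on my side; the actual signing code lives in the gist):

    async function uploadViaPresignedPost(file) {
      // 1. ask our own server for a signed POST policy (URL + form fields)
      const resp = await fetch('/s3-presigned-post?name=' + encodeURIComponent(file.name));
      const { url, fields } = await resp.json();

      // 2. POST the form straight to S3; the file must be appended last
      const form = new FormData();
      Object.keys(fields).forEach(function (key) { form.append(key, fields[key]); });
      form.append('file', file);

      const post = await fetch(url, { method: 'POST', body: form });
      if (!post.ok) throw new Error('S3 upload failed: ' + post.status);
    }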

  • 2020-12-08 03:21

    Quite an old question, but it helped me finally get this done. My solution is based on PHP and JavaScript with jQuery.

    I have the entire solution nicely wrapped at https://github.com/JoernBerkefeld/s3SignedUpload but here are the essentials:

    api.php:

    <?php
    require_once '/server/path/to/aws-autoloader.php';

    use Aws\Common\Aws;
    use Aws\S3\Exception\S3Exception;

    $BUCKET = "my-bucket";
    $CONFIG = "path-to-iam-credentials-file-relative-to-root.php";

    function getSignedUrl($filename, $mime) {
        global $BUCKET, $CONFIG;
        if (!$filename) {
            die('filename missing');
        }
        if (!$mime) {
            die('mime-type missing');
        }
        $S3 = Aws::factory($CONFIG)->get('S3');
        $final_filename = basename($filename); // sanitize the client-supplied name
        try {
            $signedUrl = $S3->getCommand('PutObject', array(
                'Bucket'      => $BUCKET,
                'Key'         => $final_filename,
                'ContentType' => $mime,
                'Body'        => '',
                'ContentMD5'  => false
            ))->createPresignedUrl('+30 minutes');
        } catch (S3Exception $e) {
            die($e->getMessage());
        }
        // the Content-Type must be part of the query string so the later PUT matches the signature
        $signedUrl .= '&Content-Type=' . urlencode($mime);
        return $signedUrl;
    }

    echo getSignedUrl($_GET['filename'], $_GET['mimetype']);


    Please make sure to add user authentication to api.php; otherwise anyone who knows the path to that file could upload files to your bucket.

    credentials.inc.php:

    <?php
    return array(
        'includes' => array('_aws'),
        'services' => array(
            'default_settings' => array(
                'params' => array(
                    'key'    => 'MY-ACCESS-KEY',
                    'secret' => 'MY-SECRECT',
                    'region'  => 'eu-west-1' // set to your region
                )
            )
        )
    );
    

    client.js:

    $("input[type=file]").onchange = function () {
        for (var file, i = 0; i < this.files.length; i++) {
            file = this.files[i];
            $.ajax({
                url : s3presignedApiUri,
                data: 'file='+ file.name + '&mime=' + file.type,
                type : "GET",
                dataType : "json",
                cache : false,
            })
            .done(function(s3presignedUrl) {
                $.ajax({
                    url : s3presignedUrl,
                    type : "PUT",
                    data : file,
                    dataType : "text",
                    cache : false,
                    contentType : file.type,
                    processData : false
                })
                .done(function(){
                    console.info('YEAH', s3presignedUrl.split('?')[0].substr(6));
                }
                .fail(function(){
                    console.error('damn...');
                }
            })
        }
    };
    

    S3 CORS settings (PUT and OPTIONS are what's actually needed, but OPTIONS cannot be enabled directly...):

    <?xml version="1.0" encoding="UTF-8"?>
    <CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
        <CORSRule>
            <AllowedOrigin>*</AllowedOrigin>
            <AllowedMethod>GET</AllowedMethod>
            <AllowedMethod>POST</AllowedMethod>
            <AllowedMethod>PUT</AllowedMethod>
            <AllowedMethod>HEAD</AllowedMethod>
            <AllowedMethod>DELETE</AllowedMethod>
            <MaxAgeSeconds>3000</MaxAgeSeconds>
            <AllowedHeader>*</AllowedHeader>
        </CORSRule>
    </CORSConfiguration>
    
  • 2020-12-08 03:21

    Please add ACL and ContentType to the parameters; that will make it work.

    const param = {
      Bucket: 'Bucket',
      Key: 'fileName',
      ACL: 'public-read',
      ContentType: 'fileType'
    };

    s3.getSignedUrl('putObject', param, function (err, url) {
      console.log('The URL is', url);
    });
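
    Note that when ACL and ContentType are part of the signed parameters, the browser's PUT has to send matching headers, otherwise S3 rejects the signature. A sketch of the client side (my addition, not from the original answer):

    fetch(url, {
      method: 'PUT',
      headers: {
        'Content-Type': 'fileType',   // must match the ContentType used when signing
        'x-amz-acl': 'public-read'    // must match the ACL used when signing
      },
      body: file                      // a File or Blob object
    }).then(function (res) {
      if (res.ok) console.log('Upload succeeded');
    });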
    
  • 2020-12-08 03:32

    I prefer this cleaner approach, via GitHub:

    If you already have a presigned URL generated for the browser, you can simply send an XHR request with that URL and the payload to upload to S3. The SDK is not required for this. A jQuery example is below:

    $.ajax({
      url: presignedUrl, // the presigned URL
      type: 'PUT',
      data: 'data to upload into URL',
      success: function() { console.log('Uploaded data successfully.'); }
    });
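
    If the payload is a File or Blob rather than a plain string, you'll usually also want to stop jQuery from preprocessing the body (a variant I've added, not from the linked example):

    $.ajax({
      url: presignedUrl,            // the presigned URL
      type: 'PUT',
      data: file,                   // a File object from an <input type="file">
      contentType: file.type,       // must match the Content-Type the URL was signed with, if any
      processData: false,           // send the binary body as-is
      success: function () { console.log('Uploaded file successfully.'); }
    });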
    
  • 2020-12-08 03:34

    Generate the pre-signed URL

    const AWS = require("aws-sdk");

    const s3 = new AWS.S3({
       endpoint: 's3-ap-south-1.amazonaws.com',  // put your region's endpoint
       accessKeyId: 'AKXXXXXXXXXXXXXXXA6U',      // put your accessKeyId
       secretAccessKey: 'kzFHoXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXssoGp',  // put your secretAccessKey
       signatureVersion: 'v4',
       region: 'ap-south-1'                      // put your region
    });

    const getSignedUrlForPut = async () => {
      const params = {
        Bucket: 'Bucket-Name',                   // put your bucket name
        Key: '317ec11af14a46b89f400bcf8f9fff1222.pdf',
        Expires: 60 * 5                          // URL is valid for 5 minutes
      };
      try {
        const url = await new Promise((resolve, reject) => {
          s3.getSignedUrl('putObject', params, (err, url) => {
            err ? reject(err) : resolve(url);
          });
        });
        console.log(url);
      } catch (err) {
        console.log(err);
      }
    };

    getSignedUrlForPut();
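
    To let the browser obtain this URL, the function has to be exposed over HTTP. A minimal sketch assuming an Express server (the route name and response shape are my assumptions, not part of the original answer):

    const express = require('express');
    const app = express();

    // Hypothetical route: returns a pre-signed PUT URL as JSON for the browser to use
    app.get('/presigned-url', (req, res) => {
      const params = {
        Bucket: 'Bucket-Name',
        Key: req.query.key,   // e.g. the file name chosen by the client
        Expires: 60 * 5
      };
      s3.getSignedUrl('putObject', params, (err, url) => {
        if (err) return res.status(500).json({ error: err.message });
        res.json({ url });
      });
    });

    app.listen(3000);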
    

    Upload the file via AJAX

    // PUT the raw file body; wrapping it in FormData would store the multipart
    // encoding inside the object instead of the file itself
    var file = fileInput.files[0];
    var settings = {
      "url": "https://sme-testing.s3-ap-south-1.amazonaws.com/317ec11af14a46b89f400bcf8f9fff1222.pdf?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIXXXXXXXXXXXX6U%2F20200525%2Fap-south-1%2Fs3%2Faws4_request&X-Amz-Date=20200525T083419Z&X-Amz-Expires=300&X-Amz-Signature=ea063731d7d043b62d0dc7c0984f4d5792c7f7f41e9ffb52a97d62adadcef422&X-Amz-SignedHeaders=host",
      "method": "PUT",
      "timeout": 0,
      "processData": false,   // do not let jQuery serialize the binary body
      "contentType": false,
      "data": file
    };

    $.ajax(settings).done(function (response) {
      console.log(response);
    });
    