Create a zip file on S3 from files on S3 using Lambda Node

前端 未结 3 1001
不思量自难忘°
不思量自难忘° 2020-12-05 10:29

I need to create a Zip file that consists of a selection of files (videos and images) located in my s3 bucket.

The problem at the moment using my code below is that

3条回答
  •  清歌不尽
    2020-12-05 10:35

    I formatted the code according to @iocoker.

    main entry

    // index.js
    
    'use strict';
    const S3Zip = require('./s3-zip')
    
    const params = {
      files: [
        {
          fileName: '1.jpg',
          key: 'key1.JPG'
        },
        {
          fileName: '2.jpg',
          key: 'key2.JPG'
        }
      ],
      zippedFileKey: 'zipped-file-key.zip'
    }
    
    exports.handler = async event => {
      const s3Zip = new S3Zip(params);
      await s3Zip.process();
    
      return {
        statusCode: 200,
        body: JSON.stringify(
          {
            message: 'Zip file successfully!'
          }
        )
      };
    
    }
    
    

    Zip file util

    // s3-zip.js
    
    'use strict';
    const fs = require('fs');
    const AWS = require("aws-sdk");
    
    const Archiver = require('archiver');
    const Stream = require('stream');
    
    // Share one keep-alive TLS agent across all SDK requests so repeated
    // S3 calls reuse connections instead of paying a handshake each time.
    const https = require('https');
    const sslAgent = new https.Agent({
      // Fixed: the option is lowercase `keepAlive`; the original `KeepAlive`
      // was silently ignored by https.Agent, so keep-alive never took effect.
      keepAlive: true,
      rejectUnauthorized: true
    });
    sslAgent.setMaxListeners(0);
    AWS.config.update({
      httpOptions: {
        agent: sslAgent,
      },
      region: 'us-east-1'
    });
    
    module.exports = class S3Zip {
      constructor(params, bucketName = 'default-bucket') {
        this.params = params;
        this.BucketName = bucketName;
      }
    
      async process() {
        const { params, BucketName } = this;
        const s3 = new AWS.S3({ apiVersion: '2006-03-01', params: { Bucket: BucketName } });
    
        // create readstreams for all the output files and store them
        const createReadStream = fs.createReadStream;
        const s3FileDwnldStreams = params.files.map(item => {
          const stream = s3.getObject({ Key: item.key }).createReadStream();
          return {
            stream,
            fileName: item.fileName
          }
        });
    
        const streamPassThrough = new Stream.PassThrough();
        // Create a zip archive using streamPassThrough style for the linking request in s3bucket
        const uploadParams = {
          ACL: 'private',
          Body: streamPassThrough,
          ContentType: 'application/zip',
          Key: params.zippedFileKey
        };
    
        const s3Upload = s3.upload(uploadParams, (err, data) => {
          if (err) {
            console.error('upload err', err)
          } else {
            console.log('upload data', data);
          }
        });
    
        s3Upload.on('httpUploadProgress', progress => {
          // console.log(progress); // { loaded: 4915, total: 192915, part: 1, key: 'foo.jpg' }
        });
    
        // create the archiver
        const archive = Archiver('zip', {
          zlib: { level: 0 }
        });
        archive.on('error', (error) => {
          throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
        });
    
        // connect the archiver to upload streamPassThrough and pipe all the download streams to it
        await new Promise((resolve, reject) => {
          console.log("Starting upload of the output Files Zip Archive");
    
          s3Upload.on('close', resolve());
          s3Upload.on('end', resolve());
          s3Upload.on('error', reject());
    
          archive.pipe(streamPassThrough);
          s3FileDwnldStreams.forEach((s3FileDwnldStream) => {
            archive.append(s3FileDwnldStream.stream, { name: s3FileDwnldStream.fileName })
          });
          archive.finalize();
    
        }).catch((error) => {
          throw new Error(`${error.code} ${error.message} ${error.data}`);
        });
    
        // Finally wait for the uploader to finish
        await s3Upload.promise();
    
      }
    }
    

提交回复
热议问题