Create a zip file from a generator in Python?

佛祖请我去吃肉 2020-11-30 07:32

I've got a large amount of data (a couple gigs) I need to write to a zip file in Python. I can't load it all into memory at once to pass to the .writestr method of ZipFile.
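
(For context, ZipFile.writestr takes the whole payload as a single in-memory object, so a straightforward approach along these lines, with generate_chunks() as a hypothetical stand-in for the real data source, would need the full couple of gigabytes in RAM:)

import zipfile

def generate_chunks():
    # Hypothetical stand-in for the real multi-gigabyte data source
    for _ in range(32):
        yield b'x' * (64 * 1024 * 1024)

with zipfile.ZipFile('big.zip', 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as zf:
    # writestr needs the entire payload as one bytes object
    zf.writestr('data.bin', b''.join(generate_chunks()))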

10 Answers
  •  被撕碎了的回忆
    2020-11-30 08:33

    In case anyone stumbles upon this question, which is still relevant in 2017 for Python 2.7, here's a working solution for a true streaming zip file, with no requirement for the output to be seekable as in the other cases. The secret is to set bit 3 of the general purpose bit flag (see https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT section 4.3.9.1).
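
    For reference, bit 3 tells readers that the CRC and size fields in the local file header are zero and that a data descriptor record follows the compressed data. A rough sketch of the ZIP64 form of that record (placeholder values; the signature is optional per the spec):

    import struct

    # Placeholder values, for illustration only
    crc32, compressed_size, uncompressed_size = 0x12345678, 1024, 4096

    # ZIP64-form data descriptor (APPNOTE section 4.3.9): optional 0x08074b50 signature,
    # CRC-32, then 8-byte compressed and uncompressed sizes, all little-endian.
    descriptor = struct.pack('<LLQQ', 0x08074b50, crc32, compressed_size, uncompressed_size)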

    Note that this implementation will always create a ZIP64-style file, allowing the streaming to work for arbitrarily large files. It includes an ugly hack to force the zip64 end of central directory record, so be aware it will cause all zipfiles written by your process to become ZIP64-style.

    import io
    import time
    import zipfile
    import zlib
    import binascii
    import struct
    
    class ByteStreamer(io.BytesIO):
        '''
        Variant on BytesIO which lets you write and consume data while
        keeping track of the total filesize written. When data is consumed
        it is removed from memory, keeping the memory requirements low.
        '''
        def __init__(self):
            super(ByteStreamer, self).__init__()
            self._tellall = 0
    
        def tell(self):
            return self._tellall
    
        def write(self, b):
            # Append to the in-memory buffer while tracking the cumulative bytes written
            orig_size = super(ByteStreamer, self).tell()
            super(ByteStreamer, self).write(b)
            new_size = super(ByteStreamer, self).tell()
            self._tellall += (new_size - orig_size)
            return new_size - orig_size
    
        def consume(self):
            # Hand back everything buffered so far and reset the in-memory store
            data = self.getvalue()
            self.seek(0)
            self.truncate(0)
            return data
    
    class BufferedZipFileWriter(zipfile.ZipFile):
        '''
        ZipFile writer with true streaming (input and output).
        Created zip files are always ZIP64-style because it is the only safe way to stream
        potentially large zip files without knowing the full size ahead of time.
    
        Example usage:
        >>> def stream():
        >>>     bzfw = BufferedZipFileWriter()
        >>>     for arc_path, buffer in inputs:  # buffer is a file-like object which supports read(size)
        >>>         for chunk in bzfw.streambuffer(arc_path, buffer):
        >>>             yield chunk
        >>>     yield bzfw.close()
        '''
        def __init__(self, compression=zipfile.ZIP_DEFLATED):
            self._buffer = ByteStreamer()
            super(BufferedZipFileWriter, self).__init__(self._buffer, mode='w', compression=compression, allowZip64=True)
    
        def streambuffer(self, zinfo_or_arcname, buffer, chunksize=2**16):
            if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
                zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname,
                                        date_time=time.localtime(time.time())[:6])
                zinfo.compress_type = self.compression
                zinfo.external_attr = 0o600 << 16     # -rw-------
            else:
                zinfo = zinfo_or_arcname
    
            zinfo.file_size = file_size = 0
            zinfo.flag_bits = 0x08  # Streaming mode: crc and size come after the data
            zinfo.header_offset = self.fp.tell()
    
            self._writecheck(zinfo)
            self._didModify = True
    
            zinfo.CRC = CRC = 0
            zinfo.compress_size = compress_size = 0
            self.fp.write(zinfo.FileHeader())
            if zinfo.compress_type == zipfile.ZIP_DEFLATED:
                cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
            else:
                cmpr = None
    
            while True:
                buf = buffer.read(chunksize)
                if not buf:
                    break
    
                file_size += len(buf)
                CRC = binascii.crc32(buf, CRC) & 0xffffffff
                if cmpr:
                    buf = cmpr.compress(buf)
                    compress_size += len(buf)
    
                self.fp.write(buf)
                compressed_bytes = self._buffer.consume()
                if compressed_bytes:
                    yield compressed_bytes
    
            if cmpr:
                buf = cmpr.flush()
                compress_size += len(buf)
                self.fp.write(buf)
                zinfo.compress_size = compress_size
                compressed_bytes = self._buffer.consume()
                if compressed_bytes:
                    yield compressed_bytes
            else:
                zinfo.compress_size = file_size
    
            zinfo.CRC = CRC
            zinfo.file_size = file_size
    
            # Write CRC and file sizes after the file data
            # Always write as zip64 -- only safe way to stream what might become a large zipfile
            fmt = '<LQQ'
            self.fp.write(struct.pack(fmt, CRC, compress_size, file_size))
            self.fp.flush()

            # Record the entry so the central directory is written on close()
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo

            # Hand the data-descriptor bytes to the caller
            compressed_bytes = self._buffer.consume()
            if compressed_bytes:
                yield compressed_bytes

        def close(self):
            # The "ugly hack" mentioned above: lower the module-level threshold so the
            # zip64 end-of-central-directory record is always emitted. Be aware this
            # affects every ZipFile written by the process from this point on.
            zipfile.ZIP64_LIMIT = 0
            super(BufferedZipFileWriter, self).close()
            # Whatever remains in the buffer is the central directory
            return self._buffer.consume()
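
    For illustration, one way the generator might be consumed (the file names, the make_payload helper, and the output path are placeholders; as in the answer, this assumes Python 2.7, and any object exposing read(size) works as a source):

    import io

    def make_payload(n_chunks, chunk=b'x' * 65536):
        # Placeholder stand-in for a large file-like source
        return io.BytesIO(chunk * n_chunks)

    def zip_stream(inputs):
        bzfw = BufferedZipFileWriter()
        for arc_path, source in inputs:
            for chunk in bzfw.streambuffer(arc_path, source):
                yield chunk
        yield bzfw.close()

    inputs = [('a.bin', make_payload(10)), ('b.bin', make_payload(20))]
    with open('streamed.zip', 'wb') as out:  # could just as well be a socket or HTTP response
        for chunk in zip_stream(inputs):
            out.write(chunk)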
