If I have existing files on Amazon's S3, what's the easiest way to get their md5sum without having to download the files?
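S3 doesn't expose an MD5 checksum directly, but every object's ETag is returned in its metadata, and for objects uploaded in a single PUT (without SSE-KMS encryption) the ETag is the plain MD5 of the content. A minimal sketch of reading it without downloading the body, using boto3's head_object (the bucket and key names are placeholders):

import boto3

s3 = boto3.client('s3')
# head_object fetches only the object's metadata; the body is never downloaded.
# 'my-bucket' and 'my-key' are placeholder names.
head = s3.head_object(Bucket='my-bucket', Key='my-key')
print(head['ETag'].strip('"'))

For multipart uploads the ETag is not a plain MD5, so the code below handles both cases.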
Below is what works for me to compare a local file's checksum against the S3 ETag. I used Python with boto3 and hashlib:
import hashlib
import boto3

def md5_checksum(filename):
    """MD5 of the whole file, for objects uploaded in a single PUT."""
    m = hashlib.md5()
    with open(filename, 'rb') as f:
        # Read in 1 MB chunks so large files do not need to fit in memory.
        for data in iter(lambda: f.read(1024 * 1024), b''):
            m.update(data)
    return m.hexdigest()

def etag_checksum(filename, chunk_size=8 * 1024 * 1024):
    """Recompute a multipart ETag: the MD5 of the concatenated per-part
    MD5 digests, followed by '-' and the number of parts. chunk_size must
    match the part size used for the original multipart upload."""
    md5s = []
    with open(filename, 'rb') as f:
        for data in iter(lambda: f.read(chunk_size), b''):
            md5s.append(hashlib.md5(data).digest())
    m = hashlib.md5(b''.join(md5s))
    return '{}-{}'.format(m.hexdigest(), len(md5s))

def etag_compare(filename, etag):
    et = etag.strip('"')  # boto3 returns the ETag wrapped in double quotes
    if '-' in et and et == etag_checksum(filename):
        return True  # multipart upload: ETag is a hash of the part hashes
    if '-' not in et and et == md5_checksum(filename):
        return True  # single PUT: ETag is the plain MD5 of the object
    return False
def main():
    # s3_accesskey, s3_secret, bucket_name, your_key, and filename are
    # placeholders: substitute your own credentials, bucket, object key,
    # and local file path.
    session = boto3.Session(
        aws_access_key_id=s3_accesskey,
        aws_secret_access_key=s3_secret,
    )
    s3 = session.client('s3')
    obj_dict = s3.get_object(Bucket=bucket_name, Key=your_key)
    etag = obj_dict['ETag']
    print('etag', etag)
    validation = etag_compare(filename, etag)
    print(validation)
    return validation
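One caveat: etag_checksum only reproduces the ETag when chunk_size equals the part size of the original multipart upload (8 MB is the default for the AWS CLI and boto3 managed transfers). If you don't know the part size, you can try a few common values; a minimal sketch, where etag_compare_multi and its part-size list are hypothetical additions, not part of the code above:

def etag_compare_multi(filename, etag, part_sizes_mb=(8, 16, 32, 64)):
    # Hypothetical helper: try several common part sizes until one
    # reproduces the multipart ETag.
    et = etag.strip('"')
    if '-' not in et:
        return et == md5_checksum(filename)
    return any(et == etag_checksum(filename, chunk_size=mb * 1024 * 1024)
               for mb in part_sizes_mb)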