Iterate through a file containing a list of newline-separated file paths and create files containing hashes of the target files.
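For example, if filelist.txt (a hypothetical name) contains the illustrative paths

    documents/report.pdf
    photos/archive.zip

then running the script as

    python3 hash_filelist.py filelist.txt --algorithm sha256

would write documents/report.pdf.sha256 and photos/archive.zip.sha256 next to the original files. The script name and paths above are only illustrative, not part of the gist.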
#! /usr/bin/env python3
import argparse
import hashlib


def create_hashfiles_from_filelist(filelist, algorithm: str = 'sha3-512'):
    '''
    Iterate through a list of file paths and create files containing hashes of the target files.

    The hash file is written next to the target file, with the algorithm name as the extension:
    if the input path is "./folder/file.txt" and the algorithm is "sha256", the hash file will
    be "./folder/file.txt.sha256".

    WARNING: If a file with the same name as the hash file already exists, it will be overwritten!

    :param filelist: A list of strings containing file paths.
    :param algorithm: The hashing algorithm to use. The following algorithms are supported:
        sha3-512, shake128, sha3-384, sha3-256, sha512-224, blake2s256, shake256,
        blake2b512, md4, md5, sha1, sha224, sm3, sha512-256, sha3-224, sha384,
        md5-sha1, sha256, sha512, ripemd160, whirlpool
    '''
    BLOCKSIZE = 60000

    for f in filelist:
        # All the code for hashing a file is inside a try block that catches all exceptions.
        # This prevents the script from crashing as a result of an error while hashing a single
        # file; it simply reports the error and moves on to the next file. Hashing large and/or
        # many files can take a long time, and this allows the script to run unattended without
        # the frustration of it crashing due to a trivial error.
        try:
            # Create a fresh hasher for each file so the digest does not
            # accumulate data from previously hashed files.
            hasher = hashlib.new(algorithm)

            print(f'Hashing {f}')
            with open(f, 'rb') as target:
                # Read the file in blocks to prevent running out of memory.
                b = target.read(BLOCKSIZE)
                while len(b) > 0:
                    hasher.update(b)
                    b = target.read(BLOCKSIZE)

            hash_output = hasher.hexdigest()

            # Write the hash file.
            with open(f'{f}.{algorithm}', 'w+') as hashfile:
                hashfile.write(f'{algorithm}: {hash_output}\n')

            print(f'\t{hash_output}')
        except Exception as e:
            print('ERROR:')
            print(f'\tFile: {f}')
            print(f'\t{e}')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Iterate through a file containing a list of '
                                                 'newline-separated file paths and create files '
                                                 'containing hashes of the target files.')
    parser.add_argument('filelist', type=str,
                        help='A file containing a list of newline-separated file paths.')
    parser.add_argument('--algorithm', type=str, default='sha3-512',
                        help='The hashing algorithm.')
    args = parser.parse_args()

    print(f'Hashing from path list "{args.filelist}" using {args.algorithm}...\n')

    with open(args.filelist, 'r') as f:
        # Ignore blank lines so a trailing newline does not produce a spurious error.
        filelist = [line for line in f.read().splitlines() if line.strip()]

    create_hashfiles_from_filelist(filelist, args.algorithm)


'''LICENSE:
The MIT License (MIT)
Copyright (c) 2019 Richie Zhang
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
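The function can also be imported and used programmatically. A minimal sketch, assuming the script is saved as hash_filelist.py (an illustrative module name, not part of the gist):

    # Illustrative module name; hashes each listed file with SHA-256 and writes
    # notes.txt.sha256 and backup.tar.gz.sha256 next to the originals.
    from hash_filelist import create_hashfiles_from_filelist

    create_hashfiles_from_filelist(['notes.txt', 'backup.tar.gz'], algorithm='sha256')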