-
Notifications
You must be signed in to change notification settings - Fork 0
/
folder_hash_compare.py
205 lines (162 loc) · 6.76 KB
/
folder_hash_compare.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
# this code is still WIP
import os
import sys
import hashlib
import subprocess
import zlib
import logging
import time
import threading
import argparse
from multiprocessing.pool import ThreadPool
# Command-line interface definition.
parser = argparse.ArgumentParser(
    prog='folder_hash_compare.py',
    description='Compares the hashes of all files in 2 folders',
    epilog='Folder Hash Compare - https://')
parser.add_argument('-p', '--primary', help='Primary folder, f.e. -p C:\\Folder1\\ or -p /home/user/folder1')
parser.add_argument('-s', '--secondary', help='Secondary folder, f.e. -s D:\\Folder2\\ or -s /home/user/folder2')
parser.add_argument('-m', '--missing', action='store_true', help='Search for missing files in either location')
parser.add_argument('-v', '--verbose', action='store_true', help='Enables verbose logging')
args = parser.parse_args()

# Paths of the two folders to compare.
# BUG FIX: -p/-s were parsed but never used; honor them, keeping the
# (empty) hard-coded values only as a fallback.
folder1_path = args.primary if args.primary else r""
folder2_path = args.secondary if args.secondary else r""

# hash algorithm (CRC32, MD5, SHA256)
hash_algorithm = "CRC32"

# Global counter of files hashed so far (incremented by generate_file_hash).
files = 0
# files_amount = 0
# text markup
class bcolors:
    """ANSI escape sequences used to colorize terminal output.

    ENDC resets the terminal back to its default attributes; every colored
    message in this script is wrapped as COLOR + text + ENDC.
    """
    HEADER = '\033[95m'     # bright magenta
    OKBLUE = '\033[94m'     # bright blue
    OKCYAN = '\033[96m'     # bright cyan
    OKGREEN = '\033[92m'    # bright green - matching hashes
    WARNING = '\033[93m'    # bright yellow - missing files
    FAIL = '\033[91m'       # bright red - hash mismatches
    ENDC = '\033[0m'        # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def generate_file_hash(file_path, hash_algorithm="CRC32"):
    """Compute the hash of a single file.

    The file is read in 64 KiB chunks so arbitrarily large files do not have
    to fit in memory (the original implementation read the whole file at once).

    Args:
        file_path: Path of the file to hash.
        hash_algorithm: One of "CRC32", "MD5" or "SHA256".

    Returns:
        int: CRC32 checksum when hash_algorithm == "CRC32".
        str: Hex digest when hash_algorithm is "MD5" or "SHA256".

    Raises:
        ValueError: For an unsupported algorithm name (previously this
            crashed with UnboundLocalError instead).
    """
    global files
    # Progress counter; plain increment, relies on the GIL when called from
    # the ThreadPool in main().
    files += 1
    if hash_algorithm == "CRC32":
        # zlib.crc32 accepts a running checksum, so chunking gives the same
        # result as hashing the whole file in one call.
        crc = 0
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(65536), b""):
                crc = zlib.crc32(chunk, crc)
        return crc
    if hash_algorithm == "MD5":
        hasher = hashlib.md5()
    elif hash_algorithm == "SHA256":
        hasher = hashlib.sha256()
    else:
        raise ValueError(f"Unsupported hash algorithm: {hash_algorithm}")
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            hasher.update(chunk)
    return hasher.hexdigest()
# function to recursively get a list of all files in a folder and its subfolders
def get_all_files(folder_path):
    """Recursively collect the path of every file under folder_path.

    Args:
        folder_path: Root folder to walk.

    Returns:
        list[str]: Full paths (root joined with filename) of all regular
        files found by os.walk, in walk order.
    """
    # The original declared `global files_amount` here but never used it;
    # the dead declaration has been removed.
    all_files = []
    for root, _dirs, filenames in os.walk(folder_path):
        for filename in filenames:
            all_files.append(os.path.join(root, filename))
    return all_files
def get_files_amount(folder_path):
    """Return the total number of files under folder_path (recursive).

    Args:
        folder_path: Root folder to walk.

    Returns:
        int: Count of all regular files found by os.walk.
    """
    # Sum per-directory file counts instead of incrementing one by one.
    return sum(len(filenames) for _root, _dirs, filenames in os.walk(folder_path))
def folder_generate_hashes(folder_path):
    """Hash every file found under folder_path.

    Uses the module-level `hash_algorithm` setting and, when `args.verbose`
    is set, prints each hash as it is generated.

    Returns:
        dict: Maps each file's path relative to folder_path to its hash
        value as produced by generate_file_hash.
    """
    hashes = {}
    for absolute_path in get_all_files(folder_path):
        digest = generate_file_hash(absolute_path, hash_algorithm)
        relative = os.path.relpath(absolute_path, folder_path)
        hashes[relative] = digest
        if args.verbose:
            print(f"Generated hash for: {absolute_path} [{digest}]")
    return hashes
def main():
    """Hash both folder trees in parallel, compare them, and report.

    Reads the module-level configuration (folder1_path, folder2_path, args)
    set up by the argument parser. Writes a timestamped log file under
    logs/ and prints a colored summary to stdout.
    """
    files_completed = 0
    files_errors = 0
    files_missing = 0

    # Log to a fresh timestamped file under logs/.
    if not os.path.isdir("logs"):
        os.makedirs("logs")
    logging.basicConfig(filename="logs/log_" + str(time.time()) + ".txt", level=logging.INFO)

    start = time.time()

    f1_amount = get_files_amount(folder1_path)
    f2_amount = get_files_amount(folder2_path)

    # Hash both folders concurrently: one worker thread per folder.
    pool = ThreadPool(processes=2)
    async_result1 = pool.apply_async(folder_generate_hashes, args=(folder1_path,))
    async_result2 = pool.apply_async(folder_generate_hashes, args=(folder2_path,))
    pool.close()
    pool.join()
    folder1_hashes = async_result1.get()
    folder2_hashes = async_result2.get()

    def _count_missing(src_folder, dst_folder, dst_hashes):
        # Count files present under src_folder with no entry in dst_hashes
        # (i.e. missing from dst_folder); log and optionally print each one.
        missing = 0
        for file_path in get_all_files(src_folder):
            relative_path = os.path.relpath(file_path, src_folder)
            if relative_path not in dst_hashes:
                if args.verbose:
                    print(bcolors.WARNING + f"{relative_path} is missing from {dst_folder}." + bcolors.ENDC)
                logging.info(f"[WARNING - MISSING FILE]: {relative_path}")
                missing += 1
        return missing

    if args.missing:
        # Check both directions: in folder 2 but not 1, then in 1 but not 2.
        files_missing += _count_missing(folder2_path, folder1_path, folder1_hashes)
        files_missing += _count_missing(folder1_path, folder2_path, folder2_hashes)

    # Compare hash values for every file present in both folders.
    for relative_path in set(folder1_hashes) & set(folder2_hashes):
        if folder1_hashes[relative_path] != folder2_hashes[relative_path]:
            if args.verbose:
                print(bcolors.FAIL + f"Hash values for {relative_path} do not match." + bcolors.ENDC)
            logging.error(f"[FILE HASH ERROR]: {relative_path}")
            files_errors += 1
        else:
            if args.verbose:
                print(bcolors.OKGREEN + f"Hash values for {relative_path} match." + bcolors.ENDC)
            logging.info(f"[OK]: {relative_path}")
            files_completed += 1

    end = time.time()
    # Total files processed across both folders (each file hashed once).
    files_amount = f1_amount + f2_amount

    # Summary: the {:.2f} format already rounds, so the redundant round()
    # from the original has been dropped.
    print("\nProcess finished in {:.2f}".format(end - start) + " seconds")
    print(f"Processed {files_amount} file(s): "
          + bcolors.OKGREEN + f"\n{files_completed} file(s) OK" + bcolors.ENDC
          + bcolors.FAIL + f"\n{files_errors} file(s) FAILED" + bcolors.ENDC
          + bcolors.WARNING + f"\n{files_missing} file(s) MISSING" + bcolors.ENDC)
# Run the comparison only when executed as a script, not on import.
if __name__ == '__main__':
    main()