From 440a3e09d6a8cb88eadca5845a79473a1527b401 Mon Sep 17 00:00:00 2001
From: Michael Akridge
Date: Mon, 5 Jun 2023 13:27:42 -1000
Subject: [PATCH] python source code added for all tools

---
 toolbox/HEIC_HEIF_converter/heic_converter.py |  61 ++++++
 .../archive-manifest-file-tool/mnftool_gui.py | 184 ++++++++++++++++++
 toolbox/file-copy-tool/filecopy_tool.py       |  45 +++++
 toolbox/folder-stats-tool/folderstats.py      |  87 +++++++++
 .../garmin-gps-file-converter/gpxconverter.py |  62 ++++++
 5 files changed, 439 insertions(+)
 create mode 100644 toolbox/HEIC_HEIF_converter/heic_converter.py
 create mode 100644 toolbox/archive-manifest-file-tool/mnftool_gui.py
 create mode 100644 toolbox/file-copy-tool/filecopy_tool.py
 create mode 100644 toolbox/folder-stats-tool/folderstats.py
 create mode 100644 toolbox/garmin-gps-file-converter/gpxconverter.py

diff --git a/toolbox/HEIC_HEIF_converter/heic_converter.py b/toolbox/HEIC_HEIF_converter/heic_converter.py
new file mode 100644
index 0000000..be81496
--- /dev/null
+++ b/toolbox/HEIC_HEIF_converter/heic_converter.py
@@ -0,0 +1,61 @@
+import os
+from PIL import Image
+from pillow_heif import register_heif_opener
+from gooey import Gooey, GooeyParser
+# Iterate through all the HEIC files in the directory
+
+def convert_images(path):
+    register_heif_opener()
+    for root, dirs, files in os.walk(path, topdown=True):
+        for i, f in enumerate(files, start=1):
+            #COPY_FROM = Path(root)
+            input_file = os.path.join(root, f)
+            print('file:' + input_file)
+            if input_file.lower().endswith((".heic", ".heif")):
+                try:
+                    with Image.open(input_file) as im:
+                        im = im.convert("RGB")
+                        output_file = os.path.join(root, os.path.splitext(f)[0] + ".JPG")
+                        im.save(output_file, "JPEG")
+                except Exception:
+                    print(f"Error: {f} is not a valid HEIC or HEIF file")
+            else:
+                print(f"Skipping {f}: not a HEIC or HEIF file")
+
+def check_slash(string):
+    slash_to_add = "\\"
+    if string and len(string) > 3:
+        return string
+    else:
+        newvalue = os.path.join(string, slash_to_add)
+        return newvalue
+@Gooey(program_name='HEIC and HEIF Converter - HEIC/HEIF to JPG.',
+       menu=[{
+           'name': 'File',
+           'items': [{
+               'type': 'AboutDialog',
+               'menuTitle': 'About',
+               'name': 'HEIC and HEIF Converter - HEIC/HEIF to JPG.',
+               'description': 'Python based HEIC and HEIF Converter - HEIC/HEIF to JPG.',
+               'version': '1.0',
+               'copyright': '',
+               'website': '',
+               'developer': 'Michael Akridge'}]}])
+
+def parse_args():
+    parser = GooeyParser(description='HEIC and HEIF Converter')
+    parser.add_argument('SELECT_PATH', widget='DirChooser', type=check_slash)
+    return parser.parse_args()
+
+def main():
+    # setup file path
+    args = parse_args()
+    pathvalue = args.SELECT_PATH
+    print(pathvalue)
+    print('---- Working ----')
+    convert_images(pathvalue)
+    print('-------------------------------------------------')
+    print('---- Complete ----')
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/toolbox/archive-manifest-file-tool/mnftool_gui.py b/toolbox/archive-manifest-file-tool/mnftool_gui.py
new file mode 100644
index 0000000..c3ec14f
--- /dev/null
+++ b/toolbox/archive-manifest-file-tool/mnftool_gui.py
@@ -0,0 +1,184 @@
+# Import the libraries we need for this script
+import hashlib
+import optparse
+import os
+import os.path
+import sys
+from gooey import Gooey, GooeyParser
+
+def read_hash_from_md5_file(md5_filename):
+    """This function reads a hash out of a .md5 file."""
+
+    with open(md5_filename) as file:
+        for line in file:
+            possible_hash = line.split(",")[1]
+            if len(possible_hash) == 32:
+                return possible_hash
+
+    return None  # failed to find the hash
+
+
+def generate_md5_hash(filename, block_size=2 ** 20, progress_blocks=128):
+    """This function generates an md5 hash for a given file."""
+
+    md5 = hashlib.md5()
+    blocks = 0
+    total_blocks = 1 + (os.path.getsize(filename) / block_size)
+    with open(filename, 'rb') as file:
+        while True:
+            data = file.read(block_size)
+            if not data:
+                break
+            md5.update(data)
+            # Display progress in the command line
+            if (blocks % progress_blocks) == 0:
+                percentage_string = "{0}%".format(100 * blocks / total_blocks)
+                sys.stdout.write("\r{1:<10}{0}".format(filename, percentage_string))
+                sys.stdout.flush()
+            blocks += 1
+    return md5.hexdigest()
+
+
+def check_against_md5_file(filename, md5_filename):
+    """This function checks a filename against its md5 filename."""
+
+    # Get the expected hash from the .md5 file
+    expected_hash = read_hash_from_md5_file(md5_filename)
+
+    # If we couldn't read the expected hash, return an error
+    if expected_hash is None:
+        print("ERROR {0}".format(filename))
+        print("Could not read a valid md5 hash from {0}".format(md5_filename))
+        return (filename, 'could not read from .md5 file', 'not generated')
+
+    # Generate the actual hash for the file being protected
+    actual_hash = generate_md5_hash(filename)
+
+    # Print out success or failure messages
+    error = None
+    if actual_hash == expected_hash:
+        sys.stdout.write("\rOK {0}\n".format(filename))
+        sys.stdout.flush()
+    else:
+        sys.stdout.write("\rERROR {0}\n".format(filename))
+        sys.stdout.flush()
+        print(" expected hash {0}".format(expected_hash))
+        print(" actual hash is {0}".format(actual_hash))
+        error = (filename, expected_hash, actual_hash)
+
+    return error
+
+
+def generate_md5_file_for(filename, md5_filename):
+    """This function generates an md5 file for an existing file."""
+    try:
+        output_file = open(md5_filename, 'w')
+    except IOError:
+        sys.stdout.write("ERROR: can't write to file {0}\n".format(md5_filename))
+        return  # abort: output_file was never opened
+    generated_hash = generate_md5_hash(filename)
+    file_size = os.path.getsize(filename)
+    output_file.write("{0},{1},{2}\n".format(os.path.basename(filename), generated_hash, file_size))
+    output_file.close()
+
+    sys.stdout.write("\rDONE {0}\n".format(filename))
+    sys.stdout.flush()
+
+
+def get_file_info_dictionaries(dirs):
+    """Walk the directories recursively and match up .mnf files to the files they describe."""
+
+    # Recursively walk the directories, trying to pair up the .md5 files
+    file_info_dicts = {}
+    for (dirpath, dirnames, filenames) in os.walk(dirs):
+        for each_filename in filenames:
+            full_file_path = os.path.join(dirpath, each_filename)
+            is_md5_file = (full_file_path[-4:].lower() == '.mnf')
+            if is_md5_file:
+                key = full_file_path[:-4]
+            else:
+                key = full_file_path
+            d = file_info_dicts.setdefault(key, dict(file=False, md5=False))
+            if is_md5_file:
+                d['md5'] = True
+            else:
+                d['file'] = True
+
+    # Print information about what was found
+    files_found = 0
+    md5_found = 0
+    both_found = 0
+    for file_name, d in iter(file_info_dicts.items()):
+        if d['md5'] and d['file']:
+            both_found += 1
+        elif d['file']:
+            files_found += 1
+        elif d['md5']:
+            md5_found += 1
+
+    print("Found {0} files with matching .mnf files.".format(both_found))
+    print("Found {0} .mnf files with no matching file.".format(md5_found))
+    print("Found {0} files with no matching .mnf file.".format(files_found))
+
+    return file_info_dicts
+
+@Gooey(
+    program_name='Mnftool',
+    menu=[{
+        'name': 'File',
+        'items': [{
+            'type': 'AboutDialog',
+            'menuTitle': 'About',
+            'name': 'Mnftool',
+            'description': 'Python based MNF file checker and generator',
+            'version': '2.1',
+            'copyright': '2022',
+            'website': '',
+            'developer': 'Michael Akridge'}]
+
+    }]
+)
+
+
+def parse_args():
+    parser = GooeyParser(description='MNF Tool Checker & Generator.')
+    parser.add_argument('SELECT_FILE_FOLDER', widget="DirChooser")
+    parser.add_argument('SELECT_OPERATION', choices=['check', 'generate'])
+    return parser.parse_args()
+
+def main():
+    args = parse_args()
+    dirs = args.SELECT_FILE_FOLDER
+    operation = args.SELECT_OPERATION
+    file_info_dicts = get_file_info_dictionaries(dirs)
+    if operation == 'check':
+        # Check each pair of matching files
+        num_checked = 0
+        errors = []
+        for filename, d in sorted(iter(file_info_dicts.items())):
+            if d['file'] and d['md5']:
+                error = check_against_md5_file(filename, filename + '.mnf')
+                if error:
+                    errors.append(error)
+                num_checked += 1
+        print("===============================================================")
+        print("SUMMARY")
+        print("{0} files checked.".format(num_checked))
+        print("{0} had errors.".format(len(errors)))
+        for (filename, expected_hash, actual_hash) in errors:
+            print(filename)
+            print(" expected hash {0}".format(expected_hash))
+            print(" actual hash is {0}".format(actual_hash))
+        print("===============================================================")
+
+    elif operation == 'generate':
+        print(file_info_dicts)
+        # Generate a .mnf manifest file for files which don't have one
+        for filename, d in sorted(iter(file_info_dicts.items())):
+            if d['file'] and not d['md5']:
+                generate_md5_file_for(filename, filename + '.mnf')
+        print("===============================================================")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/toolbox/file-copy-tool/filecopy_tool.py b/toolbox/file-copy-tool/filecopy_tool.py
new file mode 100644
index 0000000..cc1348f
--- /dev/null
+++ b/toolbox/file-copy-tool/filecopy_tool.py
@@ -0,0 +1,45 @@
+import os
+import subprocess
+from pathlib import Path
+# to get current date & time
+from datetime import datetime
+from gooey import Gooey, GooeyParser
+
+def do_the_copy(path1, path2, path3):
+    subprocess.call(["robocopy", path1, path2, "/mt", "/XX", "/z", "/mir", "/log:" + path3])
+
+@Gooey(program_name='File Copy Tool',
+       menu=[{
+           'name': 'File',
+           'items': [{
+               'type': 'AboutDialog',
+               'menuTitle': 'About',
+               'name': 'File Copy',
+               'description': 'Python based file copy using robocopy',
+               'version': '1.0',
+               'copyright': '2022',
+               'website': '',
+               'developer': 'Michael Akridge'}]}])
+
+def parse_args():
+    parser = GooeyParser(description='File Copy Tool.')
+    parser.add_argument('COPY_FROM', widget='DirChooser')
+    parser.add_argument('COPY_TO_PATH', widget='DirChooser')
+    parser.add_argument('COPY_LOG_PATH', widget='DirChooser')
+    return parser.parse_args()
+
+def main():
+    args = parse_args()
+    current_datetime = datetime.now().strftime("%m_%d_%Y_%H%M")
+    pathvalue1 = args.COPY_FROM
+    pathvalue2 = args.COPY_TO_PATH
+    pathvalue3 = args.COPY_LOG_PATH
+    LOG_FILENAME = current_datetime + "_filecopy_log.log"
+    LOG_FILENAME_PATH = os.path.join(pathvalue3, LOG_FILENAME)
+
+    do_the_copy(pathvalue1, pathvalue2, LOG_FILENAME_PATH)
+    print('-------------------------------------------------')
+    print('---- Copy Process Complete ----')
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/toolbox/folder-stats-tool/folderstats.py b/toolbox/folder-stats-tool/folderstats.py
new file mode 100644
index 0000000..96dca98
--- /dev/null
+++ b/toolbox/folder-stats-tool/folderstats.py
@@ -0,0 +1,87 @@
+#import re
+#import numpy as np
+import os
+import stat
+import pandas as pd
+import csv
+from datetime import datetime
+from gooey import Gooey, GooeyParser
+
+def get_tree_size(path):
+    total = 0
+    for entry in os.scandir(path):
+        if entry.is_dir(follow_symlinks=False):
+            total += get_tree_size(entry.path)
+        else:
+            total += entry.stat(follow_symlinks=False).st_size
+    return total
+
+def search_folders(path):
+    # define empty data list to be used in file metadata script
+    final_datas = []
+    df = pd.DataFrame()
+    for src_dir, dirs, files in os.walk(path):
+        for dir_ in dirs:
+            current_src = os.path.join(src_dir, dir_)
+            print(current_src)
+            print('Scanning ' + current_src)
+            a = get_tree_size(current_src)
+            # returns path size in mb
+            b = a/1048576
+            # returns path size in gb
+            c = a/1073741824
+            # returns path size in tb
+            d = a/float(1<<40)
+            final_data = current_src, d, c, b
+            final_datas.append(final_data)
+            print("Size: " + str(c) + " GB")
+        df = pd.DataFrame(final_datas)
+        # use break to just get first loop, root directory, stats
+        break
+    return df
+
+def check_slash(string):
+    slash_to_add = "\\"
+    if string and len(string) > 3:
+        return string
+    else:
+        newvalue = os.path.join(string, slash_to_add)
+        return newvalue
+
+@Gooey(program_name='Folder Stats',
+       menu=[{
+           'name': 'File',
+           'items': [{
+               'type': 'AboutDialog',
+               'menuTitle': 'About',
+               'name': 'Folder Size Stats',
+               'description': 'Python based file tree stat maker',
+               'version': '1.0',
+               'copyright': '2022',
+               'website': '',
+               'developer': 'Michael Akridge'}]}])
+
+def parse_args():
+    parser = GooeyParser(description='Folder Size Stat App.')
+    parser.add_argument('SELECT_PATH', widget='DirChooser', type=check_slash)
+    return parser.parse_args()
+
+def main():
+    # create csv list of files with metadata
+    header = ['Path', 'Size(TB)', 'Size(GB)', 'Size(MB)']
+    # setup file name & path
+    current_datetime = datetime.now().strftime("%m_%d_%Y_%H%M")
+    file_log_filename = current_datetime + '_folder_size_log.csv'
+    args = parse_args()
+    pathvalue = args.SELECT_PATH
+    print(pathvalue)
+    df = search_folders(pathvalue)
+    file_log_filename_path = os.path.join(pathvalue, file_log_filename)
+    df.to_csv(file_log_filename_path, index=False, header=header)
+    print('-------------------------------------------------')
+    print('---- Scan Complete ----')
+    print('---- Folder Size Log Located at ----')
+    print(file_log_filename_path)
+
+if __name__ == '__main__':
+    main()
diff --git a/toolbox/garmin-gps-file-converter/gpxconverter.py b/toolbox/garmin-gps-file-converter/gpxconverter.py
new file mode 100644
index 0000000..81633e9
--- /dev/null
+++ b/toolbox/garmin-gps-file-converter/gpxconverter.py
@@ -0,0 +1,62 @@
+import re
+#import sys
+import numpy as np
+#import pandas as pd
+#import datetime
+from datetime import datetime, timedelta
+from gooey import Gooey, GooeyParser
+
+
+def gpxconvert(value1, value2):
+    fmt = "%Y/%m/%d %H:%M:%S"
+    header = 'ident,lat,long,time,ltime'
+    GPXfile = value1
+    utcoff = value2
+    data = open(GPXfile).read()
+    new_file_name_txt = GPXfile.replace('.gpx', '.txt')
+    waypoint_name = re.findall(r'<name>([^\<]+)', data)
+    lat = re.findall(r'lat="([^\"<]+)', data)
+    lon = re.findall(r'lon="([^\"<]+)', data)
+    time = re.findall(r'