From 460b41d7aab80409c5da0517dfb96c815d7414f9 Mon Sep 17 00:00:00 2001
From: Michael Brackett <34899426+MLJBrackett@users.noreply.github.com>
Date: Thu, 22 Oct 2020 22:11:50 -0400
Subject: [PATCH 1/6] added argument functionality

---
 jsonmerge_utils.py | 22 +++++++++++++++++-----
 requirements.txt   |  1 +
 2 files changed, 18 insertions(+), 5 deletions(-)

diff --git a/jsonmerge_utils.py b/jsonmerge_utils.py
index e15e84c..d166ba2 100644
--- a/jsonmerge_utils.py
+++ b/jsonmerge_utils.py
@@ -5,17 +5,29 @@
 @author: GRENTOR
 """
 import os
+import argparse
 import logging
 #from tqdm import tqdm
 from merge_files import Merge
+
+parser = argparse.ArgumentParser(description="link-check is a broken link identifier")
+parser.add_argument('-ip', help="Input prefix",required=True)
+parser.add_argument('-op', help="Output prefix",required=True)
+parser.add_argument('-maxFileSize', help="The maximum file size (in bytes) that each merged file should have",required=True)
+parser.add_argument('-filepath', help="Path to directory where all json files are stored",required=True)
+parser.add_argument('-log_level', help="The logging level - defaults to INFO [INFO, DEBUG, ERROR]", default="INFO")
+
+args = parser.parse_args()
 logger = logging.getLogger(__name__)
 def configure_logger():
     """
     configures the logger object
     """
-    logging.basicConfig(filename='output.log', level=logging.INFO)
+
+    logging.basicConfig(filename='output.log')
     if not logger.handlers:
         # Prevent logging from propagating to the root logger
+        logger.setLevel(args.log_level)
         logger.propagate = 0
         log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
         stream_handler = logging.StreamHandler()
@@ -51,14 +63,14 @@ def main():
     """
     try:
         configure_logger()
-        data_dir = input('Folder Path: ')
+        data_dir = args.filepath
         if not os.path.exists(data_dir):
             raise FolderNotFoundError
-        input_prefix = input('I/P Prefix: ')
+        input_prefix = args.ip
         if not os.path.isfile('{}{}'.format(path_creator(data_dir, input_prefix), '1.json')):
             raise FileNotFoundError
-        output_prefix = input('O/P Prefix: ')
-        max_file_size = int(input('Max File Size: '))
+        output_prefix = args.op
+        max_file_size = int(args.maxFileSize)
 
         merge = Merge(path_creator(data_dir, input_prefix),
                       path_creator(data_dir, output_prefix),
diff --git a/requirements.txt b/requirements.txt
index a4064b9..2847abf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
 jsonmerge
 genson
+argparse
\ No newline at end of file

From 925abe5cfd09408934a347ab8e19bbf2d61f55a5 Mon Sep 17 00:00:00 2001
From: Michael Brackett <34899426+MLJBrackett@users.noreply.github.com>
Date: Thu, 22 Oct 2020 22:19:39 -0400
Subject: [PATCH 2/6] fixed pylint issues

---
 jsonmerge_utils.py | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/jsonmerge_utils.py b/jsonmerge_utils.py
index d166ba2..0b47f5e 100644
--- a/jsonmerge_utils.py
+++ b/jsonmerge_utils.py
@@ -11,11 +11,21 @@
 from merge_files import Merge
 
 parser = argparse.ArgumentParser(description="link-check is a broken link identifier")
-parser.add_argument('-ip', help="Input prefix",required=True)
-parser.add_argument('-op', help="Output prefix",required=True)
-parser.add_argument('-maxFileSize', help="The maximum file size (in bytes) that each merged file should have",required=True)
-parser.add_argument('-filepath', help="Path to directory where all json files are stored",required=True)
-parser.add_argument('-log_level', help="The logging level - defaults to INFO [INFO, DEBUG, ERROR]", default="INFO")
+parser.add_argument('-ip',
+                    help="Input prefix",
+                    required=True)
+parser.add_argument('-op',
+                    help="Output prefix",
+                    required=True)
+parser.add_argument('-maxFileSize',
+                    help="The maximum file size (in bytes) that each merged file should have",
+                    required=True)
+parser.add_argument('-filepath',
+                    help="Path to directory where all json files are stored",
+                    required=True)
+parser.add_argument('-log_level',
+                    help="The logging level - defaults to INFO [INFO, DEBUG, ERROR]",
+                    default="INFO")
 
 args = parser.parse_args()
 logger = logging.getLogger(__name__)
@@ -23,7 +33,6 @@ def configure_logger():
     """
     configures the logger object
     """
-
     logging.basicConfig(filename='output.log')
     if not logger.handlers:
         # Prevent logging from propagating to the root logger

From 8efde79f50b2d8c7f12417a9e894c19a313cb5ff Mon Sep 17 00:00:00 2001
From: Michael Brackett <34899426+MLJBrackett@users.noreply.github.com>
Date: Thu, 22 Oct 2020 22:21:40 -0400
Subject: [PATCH 3/6] fixed pylint trailing whitespace error

---
 jsonmerge_utils.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/jsonmerge_utils.py b/jsonmerge_utils.py
index 0b47f5e..95ed720 100644
--- a/jsonmerge_utils.py
+++ b/jsonmerge_utils.py
@@ -11,16 +11,16 @@
 from merge_files import Merge
 
 parser = argparse.ArgumentParser(description="link-check is a broken link identifier")
-parser.add_argument('-ip', 
+parser.add_argument('-ip',
                     help="Input prefix",
                     required=True)
-parser.add_argument('-op', 
+parser.add_argument('-op',
                     help="Output prefix",
                     required=True)
-parser.add_argument('-maxFileSize', 
+parser.add_argument('-maxFileSize',
                     help="The maximum file size (in bytes) that each merged file should have",
                     required=True)
-parser.add_argument('-filepath', 
+parser.add_argument('-filepath',
                     help="Path to directory where all json files are stored",
                     required=True)
 parser.add_argument('-log_level',

From 8247f6aac3f30cef60de4869711201b479508276 Mon Sep 17 00:00:00 2001
From: Michael Brackett <34899426+MLJBrackett@users.noreply.github.com>
Date: Fri, 23 Oct 2020 18:43:29 -0400
Subject: [PATCH 4/6] resolved suggested changes & updated code

---
 jsonmerge_utils.py | 70 +++++++++++++++++++++++++++++++++++-------------------
 1 file changed, 42 insertions(+), 28 deletions(-)

diff --git a/jsonmerge_utils.py b/jsonmerge_utils.py
index 95ed720..910142e 100644
--- a/jsonmerge_utils.py
+++ b/jsonmerge_utils.py
@@ -6,30 +6,47 @@
 """
 import os
 import argparse
+import sys
 import logging
 #from tqdm import tqdm
 from merge_files import Merge
 
-parser = argparse.ArgumentParser(description="link-check is a broken link identifier")
-parser.add_argument('-ip',
-                    help="Input prefix",
-                    required=True)
-parser.add_argument('-op',
-                    help="Output prefix",
-                    required=True)
-parser.add_argument('-maxFileSize',
-                    help="The maximum file size (in bytes) that each merged file should have",
-                    required=True)
-parser.add_argument('-filepath',
-                    help="Path to directory where all json files are stored",
-                    required=True)
-parser.add_argument('-log_level',
-                    help="The logging level - defaults to INFO [INFO, DEBUG, ERROR]",
-                    default="INFO")
+def get_parser():
+    parser = argparse.ArgumentParser(
+        description="jsonmergeutils merges different JSON files into a single JSON object in a new file",
+        add_help=True)
+    parser.add_argument('-ip',
+                        dest = "input_prefix",
+                        help="Input prefix",
+                        required=True)
+    parser.add_argument('-op',
+                        dest = "output_prefix",
+                        help="Output prefix",
+                        required=True)
+    parser.add_argument('-maxFileSize',
+                        dest = "max_file_size",
+                        help="The maximum file size (in bytes) that each merged file should have",
+                        type=int,
+                        required=True
+                        )
+    parser.add_argument('-dir_path',
+                        dest ="data_dir",
+                        help="Path to directory where all json files are stored",
+                        required=True)
+    parser.add_argument('-log_level',
+                        dest="log_level",
+                        help="The logging level - defaults to INFO",
+                        metavar = "{'INFO','DEBUG','ERROR'}",
+                        choices = ['INFO', 'DEBUG', 'ERROR'],
+                        default="INFO")
+    if len(sys.argv) == 1:
+        parser.print_help(sys.stderr)
+        sys.exit(1)
+    return parser.parse_args()
+
 
-args = parser.parse_args()
 logger = logging.getLogger(__name__)
-def configure_logger():
+def configure_logger(args):
     """
     configures the logger object
     """
@@ -71,19 +88,16 @@ def main():
     """
     Driver Function
     """
     try:
-        configure_logger()
-        data_dir = args.filepath
-        if not os.path.exists(data_dir):
+        args = get_parser()
+        configure_logger(args)
+        if not os.path.exists(args.data_dir):
             raise FolderNotFoundError
-        input_prefix = args.ip
-        if not os.path.isfile('{}{}'.format(path_creator(data_dir, input_prefix), '1.json')):
+        if not os.path.isfile('{}{}'.format(path_creator(args.data_dir, args.input_prefix), '1.json')):
             raise FileNotFoundError
-        output_prefix = args.op
-        max_file_size = int(args.maxFileSize)
 
-        merge = Merge(path_creator(data_dir, input_prefix),
-                      path_creator(data_dir, output_prefix),
-                      max_file_size)
+        merge = Merge(path_creator(args.data_dir, args.input_prefix),
+                      path_creator(args.data_dir, args.output_prefix),
+                      args.max_file_size)
 
         merge.merge()

From 5b5cae27e6943327febcfae6d95d1d1839d75a06 Mon Sep 17 00:00:00 2001
From: Michael Brackett <34899426+MLJBrackett@users.noreply.github.com>
Date: Fri, 23 Oct 2020 18:48:21 -0400
Subject: [PATCH 5/6] fixed pylint build issues

---
 jsonmerge_utils.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/jsonmerge_utils.py b/jsonmerge_utils.py
index 910142e..ace702b 100644
--- a/jsonmerge_utils.py
+++ b/jsonmerge_utils.py
@@ -12,8 +12,11 @@ from merge_files import Merge
 
 def get_parser():
+    """
+    gets & parses arguments from command line
+    """
     parser = argparse.ArgumentParser(
-        description="jsonmergeutils merges different JSON files into a single JSON object in a new file",
+        description="Merges different JSON files into a single JSON object in a new file",
         add_help=True)
     parser.add_argument('-ip',
                         dest = "input_prefix",
                         help="Input prefix",
@@ -92,7 +95,8 @@ def main():
         configure_logger(args)
         if not os.path.exists(args.data_dir):
             raise FolderNotFoundError
-        if not os.path.isfile('{}{}'.format(path_creator(args.data_dir, args.input_prefix), '1.json')):
+        if not os.path.isfile('{}{}'.format
+                              (path_creator(args.data_dir, args.input_prefix), '1.json')):
             raise FileNotFoundError
 
         merge = Merge(path_creator(args.data_dir, args.input_prefix),

From 93bd6ee7fb3fa273046788971a42f52e05326bd1 Mon Sep 17 00:00:00 2001
From: Michael Brackett <34899426+MLJBrackett@users.noreply.github.com>
Date: Thu, 29 Oct 2020 22:01:36 -0400
Subject: [PATCH 6/6] fixed argument being passed to configure_logger function

---
 jsonmerge_utils.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/jsonmerge_utils.py b/jsonmerge_utils.py
index ace702b..d52b0c4 100644
--- a/jsonmerge_utils.py
+++ b/jsonmerge_utils.py
@@ -49,14 +49,14 @@ def get_parser():
 
 
 logger = logging.getLogger(__name__)
-def configure_logger(args):
+def configure_logger(log_level):
     """
     configures the logger object
     """
     logging.basicConfig(filename='output.log')
     if not logger.handlers:
         # Prevent logging from propagating to the root logger
-        logger.setLevel(args.log_level)
+        logger.setLevel(log_level)
         logger.propagate = 0
         log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
         stream_handler = logging.StreamHandler()
@@ -92,7 +92,7 @@ def main():
     """
     try:
         args = get_parser()
-        configure_logger(args)
+        configure_logger(args.log_level)
         if not os.path.exists(args.data_dir):
             raise FolderNotFoundError
         if not os.path.isfile('{}{}'.format