diff --git a/docs/cli.rst b/docs/cli.rst
index fa469c1..4a67d6f 100644
--- a/docs/cli.rst
+++ b/docs/cli.rst
@@ -27,7 +27,7 @@ See :doc:`data-process` for detail.
 .. option:: --include=DIR, -I DIR
 
    Add search locations for item specified as relative paths.
-   See also :envvar:`YP_INCLUDE_PATHS`.
+   See also :envvar:`YP_INCLUDE_PATH`.
 
 .. option:: --define=KEY=VALUE, -D KEY=VALUE
 
@@ -141,7 +141,7 @@ The following options apply to both :program:`yp-data`,
 Environment Variables
 ---------------------
 
-.. envvar:: YP_INCLUDE_PATHS
+.. envvar:: YP_INCLUDE_PATH
 
    Set the search path for include files (that are specified as relative
    locations). Expect a list of folders/directories in the same syntax as
diff --git a/environment.yml b/environment.yml
index c1586ae..c354b1c 100644
--- a/environment.yml
+++ b/environment.yml
@@ -7,7 +7,7 @@ dependencies:
   - flake8
   - jmespath
   - jsonschema
-  - sphinx
+  - sphinx<8
   - sphinx_rtd_theme
   - pytest
   - pytest-cov
diff --git a/requirements.txt b/requirements.txt
index 52bc11e..d8125b8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,5 +2,5 @@ jmespath
 jsonschema
 python-dateutil
 ruamel.yaml
-sphinx
+sphinx<8
 sphinx-rtd-theme
diff --git a/src/yamlprocessor/__init__.py b/src/yamlprocessor/__init__.py
index 4e93917..8ea1131 100644
--- a/src/yamlprocessor/__init__.py
+++ b/src/yamlprocessor/__init__.py
@@ -1,2 +1,2 @@
 """Process YAML include files and variable substitutions."""
-__version__ = '0.6'
+__version__ = '0.6.1'
diff --git a/src/yamlprocessor/dataprocess.py b/src/yamlprocessor/dataprocess.py
index 7eef98c..df33659 100755
--- a/src/yamlprocessor/dataprocess.py
+++ b/src/yamlprocessor/dataprocess.py
@@ -55,7 +55,7 @@ from . import __version__
 
 
-def configure_basic_logging():
+def configure_basic_logging(level=logging.INFO):
     """Configure basic logging, suitable for most CLI applications.
 
     Basic no-frill format.
     """
@@ -80,7 +80,7 @@ def configure_basic_logging():
         },
         'handlers': {
             'default': {
-                'level': 'DEBUG',
+                'level': level,
                 'formatter': 'basic',
                 'class': 'logging.StreamHandler',
             },
@@ -88,12 +88,12 @@
         'loggers': {
             '': {  # root logger
                 'handlers': ['default'],
-                'level': 'INFO',
+                'level': level,
                 'propagate': False,
             },
             '__main__': {  # if __name__ == '__main__'
                 'handlers': ['default'],
-                'level': 'DEBUG',
+                'level': level,
                 'propagate': False,
             },
         }
@@ -299,6 +299,7 @@ class DataProcessor:
         r"_FORMAT_(?P<name>\w+)",
         re.M | re.S)
 
+    TIME_FORMAT_DEFAULT = '%FT%T%:z'
     UNBOUND_ORIGINAL = 'YP_ORIGINAL'
 
     def __init__(self):
@@ -310,7 +311,7 @@
             if item)
         self.include_dict = {}
         self.schema_prefix = os.getenv('YP_SCHEMA_PREFIX')
-        self.time_formats = {'': '%FT%T%:z'}
+        self.time_formats = {'': self.TIME_FORMAT_DEFAULT}
         self.time_now = datetime.now(tzlocal())  # assume application is fast
         time_ref_value = os.getenv('YP_TIME_REF_VALUE')
         if time_ref_value is None:
@@ -320,6 +321,29 @@
         self.variable_map = os.environ.copy()
         self.unbound_placeholder = None
 
+    def log_settings(self):
+        """Log (info) current settings of the processor."""
+        if self.is_process_include and self.include_paths:
+            logging.info(
+                'YP_INCLUDE_PATH=%s',
+                os.pathsep.join(self.include_paths),
+            )
+        if self.is_process_variable:
+            if self.time_now != self.time_ref:
+                logging.info(
+                    'YP_TIME_REF_VALUE=%s',
+                    strftime_with_colon_z(
+                        self.time_ref,
+                        self.time_formats[''],
+                    ),
+                )
+            if self.time_formats:
+                for key, time_format in self.time_formats.items():
+                    if key:
+                        logging.info('YP_TIME_FORMAT_%s=%s', key, time_format)
+                    elif time_format != self.TIME_FORMAT_DEFAULT:
+                        logging.info('YP_TIME_FORMAT=%s', time_format)
+
     def process_data(
         self,
         in_filenames: Union[str, Iterable[str]],
@@ -335,15 +359,23 @@
         if isinstance(in_filenames, str):
             filename = self.get_filename(in_filenames, [])
             root = self.load_file(filename)
+            logging.info('< %s', filename)
             schema_location = self.load_file_schema(filename)
             root_filenames = [filename]
         else:
             root_filenames = []
             with SpooledTemporaryFile(mode='w+') as concat_file:
+                if not in_filenames:
+                    in_filenames = ['-']
                 for filename in in_filenames:
                     filename = self.get_filename(filename, [])
-                    with open(filename) as file_:
-                        concat_file.write(file_.read())
+                    if filename == '-':
+                        concat_file.write(sys.stdin.read())
+                        sys.stdin.close()
+                    else:
+                        with open(filename) as file_:
+                            concat_file.write(file_.read())
+                    logging.info('< %s', filename)
                     root_filenames.append(filename)
                 concat_file.seek(0)
                 root = self.load_file(concat_file)
@@ -486,6 +518,7 @@ def load_include_file(
             filename = self.get_filename(
                 include_filename, parent_filenames)
             loaded_value = self.load_file(filename)
+            logging.info('< %s %s', '+' * len(parent_filenames), filename)
             parent_filenames.append(filename)
             if self.VARIABLES_KEY in value:
                 variable_map.update(value[self.VARIABLES_KEY])
@@ -847,6 +880,12 @@ def main(argv=None):
         action='store_false',
         default=True,
         help='Do not process variable substitutions')
+    parser.add_argument(
+        '--quiet', '-q',
+        dest='is_quiet_mode',
+        action='store_true',
+        default=False,
+        help='Reduce diagnostic message verbosity')
     parser.add_argument(
         '--schema-prefix',
         metavar='PREFIX',
@@ -884,6 +923,8 @@
 
     if args.is_print_version:
         parser.exit(0, f'{parser.prog} {__version__}\n')
+    if args.is_quiet_mode:
+        configure_basic_logging(level=logging.ERROR)
 
     # Set up processor
     processor = DataProcessor()
@@ -929,6 +970,7 @@ def main(argv=None):
         args.out_filename = args.filenames.pop()
     elif args.out_filename is None:
         args.out_filename = '-'
+    processor.log_settings()
     processor.process_data(args.filenames, args.out_filename)
 
 
diff --git a/src/yamlprocessor/tests/test_dataprocess.py b/src/yamlprocessor/tests/test_dataprocess.py
index 363ca47..51a3ba2 100644
--- a/src/yamlprocessor/tests/test_dataprocess.py
+++ b/src/yamlprocessor/tests/test_dataprocess.py
@@ -1,3 +1,4 @@
+import io
 import json
 
 from dateutil.parser import parse as datetimeparse
@@ -208,18 +209,22 @@ def test_main_0(tmp_path, yaml):
     assert yaml.load(outfilename.open()) == data
 
 
-def test_main_1(tmp_path, yaml):
+def test_main_1(capsys, tmp_path, yaml):
     """Test main, single include."""
     data = {'testing': [1, 2, 3]}
     data_0 = {'testing': [{'INCLUDE': '1.yaml'}, 2, 3]}
     infilename = tmp_path / 'a.yaml'
     with infilename.open('w') as infile:
         yaml.dump(data_0, infile)
-    with (tmp_path / '1.yaml').open('w') as infile_1:
+    infilename_1 = tmp_path / '1.yaml'
+    with infilename_1.open('w') as infile_1:
         yaml.dump(1, infile_1)
     outfilename = tmp_path / 'b.yaml'
     main([str(infilename), str(outfilename)])
     assert yaml.load(outfilename.open()) == data
+    captured = capsys.readouterr()
+    assert f'[INFO] < {infilename}' in captured.err.splitlines()
+    assert f'[INFO] < + {infilename_1}' in captured.err.splitlines()
 
 
 def test_main_3(tmp_path, yaml):
@@ -289,7 +294,7 @@ def test_main_6(tmp_path, yaml):
     assert yaml.load(outfilename.open()) == data
 
 
-def test_main_7(tmp_path, yaml):
+def test_main_7(capsys, tmp_path, yaml):
     """Test main, include files in a separate folder."""
     data = {'testing': [1, 2, {3: [3.1, 3.14]}]}
     data_0 = {'testing': [{'INCLUDE': '1.yaml'}, 2, {'INCLUDE': '3.yaml'}]}
@@ -298,15 +303,24 @@ def test_main_7(tmp_path, yaml):
         yaml.dump(data_0, infile)
     include_d = tmp_path / 'include'
     include_d.mkdir()
-    with (include_d / '1.yaml').open('w') as infile_1:
+    include_1 = include_d / '1.yaml'
+    with include_1.open('w') as infile_1:
         yaml.dump(1, infile_1)
-    with (include_d / '3.yaml').open('w') as infile_3:
+    include_3 = include_d / '3.yaml'
+    with include_3.open('w') as infile_3:
         yaml.dump({3: {'INCLUDE': '3x.yaml'}}, infile_3)
-    with (include_d / '3x.yaml').open('w') as infile_3x:
+    include_3x = include_d / '3x.yaml'
+    with include_3x.open('w') as infile_3x:
         yaml.dump([3.1, 3.14], infile_3x)
     outfilename = tmp_path / 'b.yaml'
     main(['-I', str(include_d), str(infilename), str(outfilename)])
     assert yaml.load(outfilename.open()) == data
+    captured = capsys.readouterr()
+    assert f'[INFO] YP_INCLUDE_PATH={include_d}' in captured.err.splitlines()
+    assert f'[INFO] < {infilename}' in captured.err.splitlines()
+    assert f'[INFO] < + {include_1}' in captured.err.splitlines()
+    assert f'[INFO] < + {include_3}' in captured.err.splitlines()
+    assert f'[INFO] < ++ {include_3x}' in captured.err.splitlines()
 
 
 def test_main_8(tmp_path, yaml):
@@ -559,6 +573,34 @@ def test_main_15(tmp_path, yaml):
     }
 
 
+def test_main_16(capsys, monkeypatch):
+    """Test main, positional argument -, so read sys.stdin."""
+    monkeypatch.setattr('sys.stdin', io.StringIO('hello: world\n'))
+    main(['-o-', '-'])
+    captured = capsys.readouterr()
+    assert captured.out.splitlines() == ['hello: world']
+
+
+def test_main_17(capsys, monkeypatch):
+    """Test main, no positional argument, so read sys.stdin."""
+    monkeypatch.setattr('sys.stdin', io.StringIO('hello: world\n'))
+    main(['-o-'])
+    captured = capsys.readouterr()
+    assert captured.out.splitlines() == ['hello: world']
+
+
+def test_main_18(capsys, monkeypatch):
+    """Test main, set reference time."""
+    monkeypatch.setattr('sys.stdin', io.StringIO('time: ${YP_TIME_REF}\n'))
+    main(['-o-', '--time-ref=2028-02-29T13:48:50Z'])
+    captured = capsys.readouterr()
+    assert captured.out.splitlines() == ["time: '2028-02-29T13:48:50Z'"]
+    assert (
+        '[INFO] YP_TIME_REF_VALUE=2028-02-29T13:48:50Z'
+        in captured.err.splitlines()
+    )
+
+
 def test_main_validate_1(tmp_path, capsys, yaml):
     """Test main, YAML with JSON schema validation."""
     schema = {