From 3668b2b2f72929c90a391ed5d82f587382b5a375 Mon Sep 17 00:00:00 2001 From: evidencebp Date: Mon, 11 Nov 2024 19:10:47 +0200 Subject: [PATCH 01/10] plugins\create_package.py broad-exception-caught The function _create_package uses os.mkdir and catch exception. This is too wide since mkdir catches specific exception. That might catch and hide new exception (e.g., in case that more code will be added to the try section). For the mkdir exceptions see: https://docs.python.org/3/library/os.html#os.mkdir --- plugins/create_package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/plugins/create_package.py b/plugins/create_package.py index 61557b27..788284f3 100644 --- a/plugins/create_package.py +++ b/plugins/create_package.py @@ -37,7 +37,9 @@ def _create_package(name): os.mkdir(path) except FileExistsError: logger.error("Path exists already: %r", path) - except Exception: + except FileNotFoundError: + logger.error("Parent path does not exist: %r", path) + except OSError: logger.exception("Unknown error while creating path %r", path) else: return path From a15507c4531a847fb2a25c4b204a671ff82147aa Mon Sep 17 00:00:00 2001 From: evidencebp Date: Mon, 11 Nov 2024 19:28:21 +0200 Subject: [PATCH 02/10] plugins\new_resource_file\__init__.py superfluous-parens Removed unneeded parentheses in return statement --- plugins/new_resource_file/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/new_resource_file/__init__.py b/plugins/new_resource_file/__init__.py index 97cf6445..91307e0e 100644 --- a/plugins/new_resource_file/__init__.py +++ b/plugins/new_resource_file/__init__.py @@ -116,4 +116,4 @@ def _is_package_path(self, file_path): for fp in (real_file_path, file_path): if fp.startswith(pp): leaf = fp[len(pp):].strip(os.sep) - return (os.sep not in leaf) + return os.sep not in leaf From 931b6fde5a7209e723be1541263d6d725ae3781c Mon Sep 17 00:00:00 2001 From: evidencebp Date: Tue, 12 Nov 2024 13:34:46 +0200 Subject: [PATCH 03/10] plugins\command_completions\__init__.py superfluous-parens Removed unneeded parenthesis in python_arg_scope = ("source.python meta.function-call.arguments.python string.quoted") I suspected that the other might intended to create a tuple. To create a single element tuple you need a comma at the end like python_arg_scope = ("source.python meta.function-call.arguments.python string.quoted",) So python_arg_scope was a string anyway Also in line 262, in quite a similar code, there are no parenthesis. 
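For illustration only (this snippet is not part of the patch), the string-vs-tuple distinction described above can be confirmed in a Python REPL; the literals are made up for the example:

    >>> ("just a string")          # parentheses only group; the value is still a str
    'just a string'
    >>> ("a one-element tuple",)   # the trailing comma is what makes it a tuple
    ('a one-element tuple',)
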
--- plugins/command_completions/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/command_completions/__init__.py b/plugins/command_completions/__init__.py index 43c1a93c..b2618376 100644 --- a/plugins/command_completions/__init__.py +++ b/plugins/command_completions/__init__.py @@ -165,7 +165,7 @@ class SublimeTextCommandCompletionPythonListener(sublime_plugin.EventListener): @inhibit_word_completions def on_query_completions(self, view, prefix, locations): loc = locations[0] - python_arg_scope = ("source.python meta.function-call.arguments.python string.quoted") + python_arg_scope = "source.python meta.function-call.arguments.python string.quoted" if not view.score_selector(loc, python_arg_scope) or not is_plugin(view): return None From 82a16c67fcfa47f9be671b587afc046c0ee14f26 Mon Sep 17 00:00:00 2001 From: evidencebp Date: Tue, 12 Nov 2024 19:16:17 +0200 Subject: [PATCH 04/10] plugins\file_conversion.py too-many-return-statements Method run of class had 12 return statements (it is recommended by pylint not to have more than 6). I extracted methods that contains these statements. Since a return exit the function and a return in the new function will not exit from the caller, in some cases the result is checked in the caller, which returns if needed. --- plugins/file_conversion.py | 158 +++++++++++++++++++++++++++---------- 1 file changed, 117 insertions(+), 41 deletions(-) diff --git a/plugins/file_conversion.py b/plugins/file_conversion.py index 7f1a6629..6c4b0d89 100644 --- a/plugins/file_conversion.py +++ b/plugins/file_conversion.py @@ -14,6 +14,7 @@ __all__ = ('PackagedevConvertCommand',) + # build command class PackagedevConvertCommand(sublime_plugin.WindowCommand): """Convert a file (view's buffer) of type ``source_format`` to type @@ -47,6 +48,104 @@ class PackagedevConvertCommand(sublime_plugin.WindowCommand): kwargs={"target_format": "yaml", "default_flow_style": False}) ) + def _auto_detect_file_type(self, source_format, target_format, output): + """Available parameters: + + source_format (str) = None + The source format. Any of "yaml", "plist" or "json". + If `None`, attempt to automatically detect the format by extension, used syntax + highlight or (with plist) the actual contents. + + target_format (str) = None + The target format. Any of "yaml", "plist" or "json". + If `None`, attempt to find an option set in the file to parse. + If unable to find an option, ask the user directly with all available format options. + output (OutputPanel) = None + """ + + type_handling = None + + # Auto-detect the file type if it's not specified + if not source_format: + output.write("Input type not specified, auto-detecting...") + for Loader in loaders.get.values(): + if Loader.file_is_valid(self.view): + source_format = Loader.ext + output.print(' %s\n' % Loader.name) + break + + if not source_format: + type_handling = output.print("\nUnable to detect file type.") + elif target_format == source_format: + type_handling = output.print("File already is %s." % Loader.name) + + return type_handling + + def _validate_run(self, source_format=None, target_format=None): + """Available parameters: + + source_format (str) = None + The source format. Any of "yaml", "plist" or "json". + If `None`, attempt to automatically detect the format by extension, used syntax + highlight or (with plist) the actual contents. + + target_format (str) = None + The target format. Any of "yaml", "plist" or "json". + If `None`, attempt to find an option set in the file to parse. 
+ If unable to find an option, ask the user directly with all available format options. + """ + + result = False + + # Check the environment (view, args, ...) + if self.view.is_dirty(): + # Save the file so that source and target file on the drive don't differ + self.view.run_command("save") + if self.view.is_dirty(): + result = sublime.error_message("The file could not be saved correctly. " + "The build was aborted") + elif source_format and target_format == source_format: + result = True + self.status("Target and source file format are identical. (%s)" % target_format) + + elif source_format and source_format not in loaders.get: + result = True + self.status("Loader for '%s' not supported/implemented." % source_format) + + elif target_format and target_format not in dumpers.get: + result = True + self.status("Dumper for '%s' not supported/implemented." % target_format) + + return result + + def _revalidate_run(self, output, source_format=None, target_format=None,): + """Available parameters: + + source_format (str) = None + The source format. Any of "yaml", "plist" or "json". + If `None`, attempt to automatically detect the format by extension, used syntax + highlight or (with plist) the actual contents. + + target_format (str) = None + The target format. Any of "yaml", "plist" or "json". + If `None`, attempt to find an option set in the file to parse. + If unable to find an option, ask the user directly with all available format options. + output (OutputPanel) = None + """ + result = None + # Validate the shit again, but this time print to output panel + if source_format is not None and target_format == source_format: + result = output.print("\nTarget and source file format are identical. (%s)" + % target_format) + + if target_format not in dumpers.get: + result = output.print("\nDumper for '%s' not supported/implemented." + % target_format) + + return result + + + def run(self, source_format=None, target_format=None, ext=None, open_new_file=False, rearrange_yaml_syntax_def=False, _output=None, **kwargs): """Available parameters: @@ -96,47 +195,24 @@ def run(self, source_format=None, target_format=None, ext=None, """ self.view = self.window.active_view() - # Check the environment (view, args, ...) - if self.view.is_dirty(): - # Save the file so that source and target file on the drive don't differ - self.view.run_command("save") - if self.view.is_dirty(): - return sublime.error_message("The file could not be saved correctly. " - "The build was aborted") - + result = self._validate_run(self, source_format, target_format) + if result: + return result + file_path = self.view.file_name() if not file_path: return self.status("File does not exist.", file_path) file_path = Path(file_path) - if source_format and target_format == source_format: - return self.status("Target and source file format are identical. (%s)" % target_format) - - if source_format and source_format not in loaders.get: - return self.status("Loader for '%s' not supported/implemented." % source_format) - - if target_format and target_format not in dumpers.get: - return self.status("Dumper for '%s' not supported/implemented." 
% target_format) - # Now the actual "building" starts (collecting remaining parameters) with OutputPanel.create(self.window, "package_dev", read_only=True, force_writes=True) as output: output.show() - # Auto-detect the file type if it's not specified - if not source_format: - output.write("Input type not specified, auto-detecting...") - for Loader in loaders.get.values(): - if Loader.file_is_valid(self.view): - source_format = Loader.ext - output.print(' %s\n' % Loader.name) - break - - if not source_format: - return output.print("\nUnable to detect file type.") - elif target_format == source_format: - return output.print("File already is %s." % Loader.name) - + type_handling = self._auto_detect_file_type(source_format, target_format, output) + if type_handling: + return type_handling + # Load inline options Loader = loaders.get[source_format] opts = Loader.load_options(self.view) @@ -191,14 +267,12 @@ def on_select(index): return target_format = opts['target_format'] - # Validate the shit again, but this time print to output panel - if source_format is not None and target_format == source_format: - return output.print("\nTarget and source file format are identical. (%s)" - % target_format) - - if target_format not in dumpers.get: - return output.print("\nDumper for '%s' not supported/implemented." - % target_format) + result = self._revalidate_run(self, + output, + source_format, + target_format) + if result: + return result output.print(' %s\n' % dumpers.get[target_format].name) @@ -214,16 +288,18 @@ def on_select(index): output.print("Unexpected error occurred while parsing, " "please see the console for details.") raise - if not data: - return # Determine new file name new_file_path = file_path.with_suffix(get_new_ext(target_format)) new_dir = new_file_path.parent + valid_path = True try: os.makedirs(str(new_dir), exist_ok=True) except OSError: output.print("Could not create folder '%s'" % new_dir) + valid_path = False + + if not data or not valid_path: return # Now dump to new file From 55eeec9f9395752903a54578abda903eadb3059e Mon Sep 17 00:00:00 2001 From: evidencebp Date: Tue, 12 Nov 2024 19:46:10 +0200 Subject: [PATCH 05/10] plugins\syntax_dev\completions.py too-many-return-statements Function match_selector had 7 return statements while pylint recommends to have at most 6. I assigned the return values into the result variable and use a single statement returning in the end of the function. 
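As a rough sketch of the single-exit pattern used in this commit (simplified stand-in code under assumed names, not the actual match_selector logic):

    def classify(scope):
        result = None                        # single result variable
        if "meta.scope" in scope:
            result = "scope completion"
        elif "meta.include" in scope:
            result = "context completion"
        else:
            result = "keyword completion"
        return result                        # one return statement at the end
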
--- plugins/syntax_dev/completions.py | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/plugins/syntax_dev/completions.py b/plugins/syntax_dev/completions.py index f363ccd1..d55ec800 100644 --- a/plugins/syntax_dev/completions.py +++ b/plugins/syntax_dev/completions.py @@ -232,42 +232,47 @@ def match_selector(selector, offset=0): return all(self.view.match_selector(point + offset, selector) for point in locations) + result = None + # None of our business if not match_selector("- comment - (source.regexp - keyword.other.variable)"): - return None + result = None # Scope name completions based on our scope_data database - if match_selector("meta.expect-scope, meta.scope", -1): - return self._complete_scope(prefix, locations) + elif match_selector("meta.expect-scope, meta.scope", -1): + result = self._complete_scope(prefix, locations) # Auto-completion for include values using the 'contexts' keys and for - if match_selector( + elif match_selector( "meta.expect-context-list-or-content | meta.context-list-or-content", -1, ): - return ((self._complete_keyword(prefix, locations) or []) + result = ((self._complete_keyword(prefix, locations) or []) + self._complete_context(prefix, locations)) # Auto-completion for include values using the 'contexts' keys - if match_selector( + elif match_selector( "meta.expect-context-list | meta.expect-context | meta.include | meta.context-list", -1, ): - return self._complete_context(prefix, locations) or None + result = self._complete_context(prefix, locations) or None # Auto-completion for branch points with 'fail' key - if match_selector( + elif match_selector( "meta.expect-branch-point-reference | meta.branch-point-reference", -1, ): - return self._complete_branch_point() + result = self._complete_branch_point() # Auto-completion for variables in match patterns using 'variables' keys - if match_selector("keyword.other.variable"): - return self._complete_variable() + elif match_selector("keyword.other.variable"): + result = self._complete_variable() - # Standard completions for unmatched regions - return self._complete_keyword(prefix, locations) + else: + # Standard completions for unmatched regions + result = self._complete_keyword(prefix, locations) + + return result def _line_prefix(self, point): _, col = self.view.rowcol(point) From 6b2349272fde43fbd992a4c3a3f02727e96aff9d Mon Sep 17 00:00:00 2001 From: evidencebp Date: Tue, 12 Nov 2024 20:55:10 +0200 Subject: [PATCH 06/10] plugins\syntax_dev_legacy.py too-many-branches The method on_query_completions of the class LegacySyntaxDefCompletions had 17 branches while pylint recommends to have at most 12. I extracted methods to structure more the code. --- plugins/syntax_dev_legacy.py | 110 ++++++++++++++++++++++------------- 1 file changed, 69 insertions(+), 41 deletions(-) diff --git a/plugins/syntax_dev_legacy.py b/plugins/syntax_dev_legacy.py index 0f666345..e77aa5d6 100644 --- a/plugins/syntax_dev_legacy.py +++ b/plugins/syntax_dev_legacy.py @@ -122,7 +122,6 @@ def dump(self, data, sort=True, sort_order=None, sort_numeric=True, *args, **kwa self.output.print("Dumping %s..." % self.name) return yaml.dump(data, **params) - class PackagedevRearrangeYamlSyntaxDefCommand(sublime_plugin.TextCommand): """Parses YAML and sorts all the dict keys reasonably. Does not write to the file, only to the buffer. 
@@ -317,6 +316,11 @@ def filter_pattern_regs(reg): ############################################################################### +def inhibit(ret): + return (ret, sublime.INHIBIT_WORD_COMPLETIONS) + + + class LegacySyntaxDefCompletions(sublime_plugin.EventListener): def __init__(self): base_keys = "match,end,begin,name,contentName,comment,scopeName,include".split(',') @@ -334,21 +338,74 @@ def __init__(self): self.base_completions = completions + def _validate_query_completions(self, locations, view): + + if len(locations) > 1: + result = [] + # Do not bother if not in yaml-tmlanguage scope and within or at the end of a comment + elif not view.match_selector(locations[0] + , "source.yaml-tmlanguage - comment"): + result = [] + else: + result = None + + return result + + def _browse_nodes(self, nodes, tokens, window): + + node = None + # Browse the nodes and their children + for i, token in enumerate(tokens): + node = nodes.find(token) + if not node: + status( + "Warning: `%s` not found in scope naming conventions" + % '.'.join(tokens[:i + 1]), + window + ) + break + nodes = node.children + if not nodes: + break + + return node + + def _autocomplete(self, view, loc, window): + + # Auto-completion for include values using the repository keys + if view.match_selector(loc, "meta.include meta.value string, variable.other.include"): + # Search for the whole include string which contains the current location + reg = extract_selector(view, "meta.include meta.value string", loc) + include_text = view.substr(reg) + + if ( + not reg + or (not include_text.startswith("'#") + and not include_text.startswith('"#')) + ): + return [] + + variables = [view.substr(r) + for r in view.find_by_selector("variable.other.repository-key")] + status( + "Found %d local repository keys to be used in includes" % len(variables), + window + ) + return inhibit(zip(variables, variables)) + + return None + def on_query_completions(self, view, prefix, locations): # We can't work with multiple selections here window = view.window() - if len(locations) > 1: - return [] + result = self._validate_query_completions(locations, view) + if result: + return result loc = locations[0] - # Do not bother if not in yaml-tmlanguage scope and within or at the end of a comment - if not view.match_selector(loc, "source.yaml-tmlanguage - comment"): - return [] - def inhibit(ret): - return (ret, sublime.INHIBIT_WORD_COMPLETIONS) # Extend numerics into `'123': {name: $0}`, as used in captures, # but only if they are not in a string scope @@ -373,20 +430,8 @@ def inhibit(ret): if len(tokens) > 1: del tokens[-1] # The last token is either incomplete or empty - # Browse the nodes and their children nodes = COMPILED_HEADS - for i, token in enumerate(tokens): - node = nodes.find(token) - if not node: - status( - "Warning: `%s` not found in scope naming conventions" - % '.'.join(tokens[:i + 1]), - window - ) - break - nodes = node.children - if not nodes: - break + node = self._browse_nodes(nodes, tokens, window) if nodes and node: return inhibit(nodes.to_completion()) @@ -416,26 +461,9 @@ def inhibit(ret): # Due to "." 
being set as a trigger this should not be computed after the block above return [] - # Auto-completion for include values using the repository keys - if view.match_selector(loc, "meta.include meta.value string, variable.other.include"): - # Search for the whole include string which contains the current location - reg = extract_selector(view, "meta.include meta.value string", loc) - include_text = view.substr(reg) - - if ( - not reg - or (not include_text.startswith("'#") - and not include_text.startswith('"#')) - ): - return [] - - variables = [view.substr(r) - for r in view.find_by_selector("variable.other.repository-key")] - status( - "Found %d local repository keys to be used in includes" % len(variables), - window - ) - return inhibit(zip(variables, variables)) + auto_complete = self._autocomplete(self, view, loc, window) + if auto_complete: + return auto_complete # Do not bother if the syntax def already matched the current position, # except in the main repository From 4d138d6c242d3ffd9ab0970e47f0d11fc823030d Mon Sep 17 00:00:00 2001 From: evidencebp Date: Mon, 18 Nov 2024 13:35:27 +0200 Subject: [PATCH 07/10] plugins\lib\fileconv\dumpers.py unnecessary-pass The methods validate_data and write of DumperProto should be overridden by subclasses (as discussed with FichteFoll). Instead of leaving with just pass changed to raise NotImplementedError to verify that they are not mistakenly used. --- plugins/lib/fileconv/dumpers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/lib/fileconv/dumpers.py b/plugins/lib/fileconv/dumpers.py index 4454bafe..f9501ba5 100644 --- a/plugins/lib/fileconv/dumpers.py +++ b/plugins/lib/fileconv/dumpers.py @@ -97,7 +97,8 @@ def validate_data(self, data, *args, **kwargs): (lambda x: x is None, False)) ] """ - pass + raise NotImplementedError + def _validate_data(self, data, funcs): """Check for incompatible data recursively. @@ -178,7 +179,7 @@ def dump(self, data, *args, **kwargs): def write(self, data, *args, **kwargs): """To be implemented.""" - pass + raise NotImplementedError class JSONDumper(DumperProto): From d91194c1f572d2fcda326f46d74c0e4de6b1015d Mon Sep 17 00:00:00 2001 From: evidencebp Date: Mon, 18 Nov 2024 13:37:46 +0200 Subject: [PATCH 08/10] plugins\lib\fileconv\loaders.py unnecessary-pass The method parse of LoaderProto should be overridden by subclasses (as discussed with FichteFoll). Instead of leaving with just pass changed to raise NotImplementedError to verify that they are not mistakenly used. --- plugins/lib/fileconv/loaders.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/lib/fileconv/loaders.py b/plugins/lib/fileconv/loaders.py index aa66564a..cd3254a9 100644 --- a/plugins/lib/fileconv/loaders.py +++ b/plugins/lib/fileconv/loaders.py @@ -295,7 +295,8 @@ def parse(self, *args, **kwargs): """To be implemented. Should return the parsed data from ``self.file_path`` as a Python object. """ - pass + raise NotImplementedError + class JSONLoader(LoaderProto): From 4bd557782ba294d11196efcaf13ffdc7b02b1c87 Mon Sep 17 00:00:00 2001 From: evidencebp Date: Mon, 18 Nov 2024 13:56:56 +0200 Subject: [PATCH 09/10] Revert "plugins\file_conversion.py too-many-return-statements" This reverts commit 82a16c67fcfa47f9be671b587afc046c0ee14f26. 
--- plugins/file_conversion.py | 158 ++++++++++--------------------------- 1 file changed, 41 insertions(+), 117 deletions(-) diff --git a/plugins/file_conversion.py b/plugins/file_conversion.py index 6c4b0d89..7f1a6629 100644 --- a/plugins/file_conversion.py +++ b/plugins/file_conversion.py @@ -14,7 +14,6 @@ __all__ = ('PackagedevConvertCommand',) - # build command class PackagedevConvertCommand(sublime_plugin.WindowCommand): """Convert a file (view's buffer) of type ``source_format`` to type @@ -48,104 +47,6 @@ class PackagedevConvertCommand(sublime_plugin.WindowCommand): kwargs={"target_format": "yaml", "default_flow_style": False}) ) - def _auto_detect_file_type(self, source_format, target_format, output): - """Available parameters: - - source_format (str) = None - The source format. Any of "yaml", "plist" or "json". - If `None`, attempt to automatically detect the format by extension, used syntax - highlight or (with plist) the actual contents. - - target_format (str) = None - The target format. Any of "yaml", "plist" or "json". - If `None`, attempt to find an option set in the file to parse. - If unable to find an option, ask the user directly with all available format options. - output (OutputPanel) = None - """ - - type_handling = None - - # Auto-detect the file type if it's not specified - if not source_format: - output.write("Input type not specified, auto-detecting...") - for Loader in loaders.get.values(): - if Loader.file_is_valid(self.view): - source_format = Loader.ext - output.print(' %s\n' % Loader.name) - break - - if not source_format: - type_handling = output.print("\nUnable to detect file type.") - elif target_format == source_format: - type_handling = output.print("File already is %s." % Loader.name) - - return type_handling - - def _validate_run(self, source_format=None, target_format=None): - """Available parameters: - - source_format (str) = None - The source format. Any of "yaml", "plist" or "json". - If `None`, attempt to automatically detect the format by extension, used syntax - highlight or (with plist) the actual contents. - - target_format (str) = None - The target format. Any of "yaml", "plist" or "json". - If `None`, attempt to find an option set in the file to parse. - If unable to find an option, ask the user directly with all available format options. - """ - - result = False - - # Check the environment (view, args, ...) - if self.view.is_dirty(): - # Save the file so that source and target file on the drive don't differ - self.view.run_command("save") - if self.view.is_dirty(): - result = sublime.error_message("The file could not be saved correctly. " - "The build was aborted") - elif source_format and target_format == source_format: - result = True - self.status("Target and source file format are identical. (%s)" % target_format) - - elif source_format and source_format not in loaders.get: - result = True - self.status("Loader for '%s' not supported/implemented." % source_format) - - elif target_format and target_format not in dumpers.get: - result = True - self.status("Dumper for '%s' not supported/implemented." % target_format) - - return result - - def _revalidate_run(self, output, source_format=None, target_format=None,): - """Available parameters: - - source_format (str) = None - The source format. Any of "yaml", "plist" or "json". - If `None`, attempt to automatically detect the format by extension, used syntax - highlight or (with plist) the actual contents. - - target_format (str) = None - The target format. Any of "yaml", "plist" or "json". 
- If `None`, attempt to find an option set in the file to parse. - If unable to find an option, ask the user directly with all available format options. - output (OutputPanel) = None - """ - result = None - # Validate the shit again, but this time print to output panel - if source_format is not None and target_format == source_format: - result = output.print("\nTarget and source file format are identical. (%s)" - % target_format) - - if target_format not in dumpers.get: - result = output.print("\nDumper for '%s' not supported/implemented." - % target_format) - - return result - - - def run(self, source_format=None, target_format=None, ext=None, open_new_file=False, rearrange_yaml_syntax_def=False, _output=None, **kwargs): """Available parameters: @@ -195,24 +96,47 @@ def run(self, source_format=None, target_format=None, ext=None, """ self.view = self.window.active_view() - result = self._validate_run(self, source_format, target_format) - if result: - return result - + # Check the environment (view, args, ...) + if self.view.is_dirty(): + # Save the file so that source and target file on the drive don't differ + self.view.run_command("save") + if self.view.is_dirty(): + return sublime.error_message("The file could not be saved correctly. " + "The build was aborted") + file_path = self.view.file_name() if not file_path: return self.status("File does not exist.", file_path) file_path = Path(file_path) + if source_format and target_format == source_format: + return self.status("Target and source file format are identical. (%s)" % target_format) + + if source_format and source_format not in loaders.get: + return self.status("Loader for '%s' not supported/implemented." % source_format) + + if target_format and target_format not in dumpers.get: + return self.status("Dumper for '%s' not supported/implemented." % target_format) + # Now the actual "building" starts (collecting remaining parameters) with OutputPanel.create(self.window, "package_dev", read_only=True, force_writes=True) as output: output.show() - type_handling = self._auto_detect_file_type(source_format, target_format, output) - if type_handling: - return type_handling - + # Auto-detect the file type if it's not specified + if not source_format: + output.write("Input type not specified, auto-detecting...") + for Loader in loaders.get.values(): + if Loader.file_is_valid(self.view): + source_format = Loader.ext + output.print(' %s\n' % Loader.name) + break + + if not source_format: + return output.print("\nUnable to detect file type.") + elif target_format == source_format: + return output.print("File already is %s." % Loader.name) + # Load inline options Loader = loaders.get[source_format] opts = Loader.load_options(self.view) @@ -267,12 +191,14 @@ def on_select(index): return target_format = opts['target_format'] - result = self._revalidate_run(self, - output, - source_format, - target_format) - if result: - return result + # Validate the shit again, but this time print to output panel + if source_format is not None and target_format == source_format: + return output.print("\nTarget and source file format are identical. (%s)" + % target_format) + + if target_format not in dumpers.get: + return output.print("\nDumper for '%s' not supported/implemented." 
+ % target_format) output.print(' %s\n' % dumpers.get[target_format].name) @@ -288,18 +214,16 @@ def on_select(index): output.print("Unexpected error occurred while parsing, " "please see the console for details.") raise + if not data: + return # Determine new file name new_file_path = file_path.with_suffix(get_new_ext(target_format)) new_dir = new_file_path.parent - valid_path = True try: os.makedirs(str(new_dir), exist_ok=True) except OSError: output.print("Could not create folder '%s'" % new_dir) - valid_path = False - - if not data or not valid_path: return # Now dump to new file From 757e22c8aa4f3c1b31a17c3d777af5e9e79b9c1e Mon Sep 17 00:00:00 2001 From: evidencebp Date: Mon, 18 Nov 2024 13:59:57 +0200 Subject: [PATCH 10/10] Revert "plugins\syntax_dev_legacy.py too-many-branches" This reverts commit 6b2349272fde43fbd992a4c3a3f02727e96aff9d. --- plugins/syntax_dev_legacy.py | 110 +++++++++++++---------------------- 1 file changed, 41 insertions(+), 69 deletions(-) diff --git a/plugins/syntax_dev_legacy.py b/plugins/syntax_dev_legacy.py index e77aa5d6..0f666345 100644 --- a/plugins/syntax_dev_legacy.py +++ b/plugins/syntax_dev_legacy.py @@ -122,6 +122,7 @@ def dump(self, data, sort=True, sort_order=None, sort_numeric=True, *args, **kwa self.output.print("Dumping %s..." % self.name) return yaml.dump(data, **params) + class PackagedevRearrangeYamlSyntaxDefCommand(sublime_plugin.TextCommand): """Parses YAML and sorts all the dict keys reasonably. Does not write to the file, only to the buffer. @@ -316,11 +317,6 @@ def filter_pattern_regs(reg): ############################################################################### -def inhibit(ret): - return (ret, sublime.INHIBIT_WORD_COMPLETIONS) - - - class LegacySyntaxDefCompletions(sublime_plugin.EventListener): def __init__(self): base_keys = "match,end,begin,name,contentName,comment,scopeName,include".split(',') @@ -338,74 +334,21 @@ def __init__(self): self.base_completions = completions - def _validate_query_completions(self, locations, view): - - if len(locations) > 1: - result = [] - # Do not bother if not in yaml-tmlanguage scope and within or at the end of a comment - elif not view.match_selector(locations[0] - , "source.yaml-tmlanguage - comment"): - result = [] - else: - result = None - - return result - - def _browse_nodes(self, nodes, tokens, window): - - node = None - # Browse the nodes and their children - for i, token in enumerate(tokens): - node = nodes.find(token) - if not node: - status( - "Warning: `%s` not found in scope naming conventions" - % '.'.join(tokens[:i + 1]), - window - ) - break - nodes = node.children - if not nodes: - break - - return node - - def _autocomplete(self, view, loc, window): - - # Auto-completion for include values using the repository keys - if view.match_selector(loc, "meta.include meta.value string, variable.other.include"): - # Search for the whole include string which contains the current location - reg = extract_selector(view, "meta.include meta.value string", loc) - include_text = view.substr(reg) - - if ( - not reg - or (not include_text.startswith("'#") - and not include_text.startswith('"#')) - ): - return [] - - variables = [view.substr(r) - for r in view.find_by_selector("variable.other.repository-key")] - status( - "Found %d local repository keys to be used in includes" % len(variables), - window - ) - return inhibit(zip(variables, variables)) - - return None - def on_query_completions(self, view, prefix, locations): # We can't work with multiple selections here window = view.window() - 
result = self._validate_query_completions(locations, view) - if result: - return result + if len(locations) > 1: + return [] loc = locations[0] + # Do not bother if not in yaml-tmlanguage scope and within or at the end of a comment + if not view.match_selector(loc, "source.yaml-tmlanguage - comment"): + return [] + def inhibit(ret): + return (ret, sublime.INHIBIT_WORD_COMPLETIONS) # Extend numerics into `'123': {name: $0}`, as used in captures, # but only if they are not in a string scope @@ -430,8 +373,20 @@ def on_query_completions(self, view, prefix, locations): if len(tokens) > 1: del tokens[-1] # The last token is either incomplete or empty + # Browse the nodes and their children nodes = COMPILED_HEADS - node = self._browse_nodes(nodes, tokens, window) + for i, token in enumerate(tokens): + node = nodes.find(token) + if not node: + status( + "Warning: `%s` not found in scope naming conventions" + % '.'.join(tokens[:i + 1]), + window + ) + break + nodes = node.children + if not nodes: + break if nodes and node: return inhibit(nodes.to_completion()) @@ -461,9 +416,26 @@ def on_query_completions(self, view, prefix, locations): # Due to "." being set as a trigger this should not be computed after the block above return [] - auto_complete = self._autocomplete(self, view, loc, window) - if auto_complete: - return auto_complete + # Auto-completion for include values using the repository keys + if view.match_selector(loc, "meta.include meta.value string, variable.other.include"): + # Search for the whole include string which contains the current location + reg = extract_selector(view, "meta.include meta.value string", loc) + include_text = view.substr(reg) + + if ( + not reg + or (not include_text.startswith("'#") + and not include_text.startswith('"#')) + ): + return [] + + variables = [view.substr(r) + for r in view.find_by_selector("variable.other.repository-key")] + status( + "Found %d local repository keys to be used in includes" % len(variables), + window + ) + return inhibit(zip(variables, variables)) # Do not bother if the syntax def already matched the current position, # except in the main repository