diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 26e20a5d..00000000 --- a/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -[flake8] -select = - E - W - F -ignore = - W503 # makes Flake8 work like black - W504 - E203 # makes Flake8 work like black - E741 - E501 # long line checking is done in black -exclude = test/ -per-file-ignores = - */__init__.py: F401 diff --git a/.github/actions/setup-python-hatch/action.yml b/.github/actions/setup-python-hatch/action.yml new file mode 100644 index 00000000..1ab687f6 --- /dev/null +++ b/.github/actions/setup-python-hatch/action.yml @@ -0,0 +1,20 @@ +name: Setup Python env +description: Install Python & Hatch +inputs: + python-version: + description: 'Version of Python to Install' + required: true + default: '3.9' +runs: + using: "composite" + steps: + - name: "Set up Python ${{ inputs.python-version }}" + uses: actions/setup-python@v4 + with: + python-version: "${{ inputs.python-version }}" + + - name: Install Hatch + shell: bash + run: | + python -m pip install --user --upgrade pip + python -m pip install hatch diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5633c4f2..b50bd1c5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,50 +42,14 @@ jobs: - name: Check out the repository uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: "Set up Python & Hatch - 3.11" + uses: ./.github/actions/setup-python-hatch with: - python-version: '3.11' - - - name: "Install build specific python dependencies" - run: | - python -m pip install --user --upgrade pip - python -m pip install --upgrade wheel twine check-wheel-contents - python -m pip --version - - - name: "Install Hatch" - shell: bash - run: pip3 install hatch + python-version: "3.11" - name: "Build Python Package" run: | hatch build - - name: "Show distributions" - run: ls -lh dist/ - - - name: "Check distribution descriptions" - run: | - twine check dist/* - - - name: "Check wheel contents" - run: | - check-wheel-contents dist/*.whl --ignore W007,W008 - - - name: "Install wheel distributions" - run: | - find ./dist/dbt_common-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ - - # TODO: how to validate here? we did dbt --version previously. this checks it's there, but not that it can do anything. maybe it's enough? - - name: "Check wheel distributions" - run: | - pip freeze | grep dbt-common - - - name: "Install source distributions" - run: | - find ./dist/dbt_common-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ - - # TODO: how to validate here? we did dbt --version previously. this checks it's there, but not that it can do anything. maybe it's enough? 
- - name: "Check source distributions" - run: | - pip freeze | grep dbt-common + - name: "Check build" + run: hatch run build:check-all diff --git a/.github/workflows/ci_code_quality.yml b/.github/workflows/ci_code_quality.yml index cb576640..bd7aaf95 100644 --- a/.github/workflows/ci_code_quality.yml +++ b/.github/workflows/ci_code_quality.yml @@ -42,14 +42,14 @@ jobs: - name: Check out the repository uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: "Set up Python & Hatch - 3.11" + uses: ./.github/actions/setup-python-hatch with: - python-version: '3.11' + python-version: "3.11" - - name: Install Hatch + - name: Install pre-commit shell: bash - run: pip3 install hatch + run: pip3 install pre-commit - name: Run Pre-commit Hooks - run: hatch run dev-env:pre-commit run --show-diff-on-failure --color=always --all-files + run: pre-commit run --show-diff-on-failure --color=always --all-files diff --git a/.github/workflows/ci_dbt_core_testing.yml b/.github/workflows/ci_dbt_core_testing.yml index 37255625..5ee8fb31 100644 --- a/.github/workflows/ci_dbt_core_testing.yml +++ b/.github/workflows/ci_dbt_core_testing.yml @@ -33,10 +33,10 @@ jobs: steps: - name: "Check out dbt-core" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: "Set up Python 3.11" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.11" diff --git a/.github/workflows/ci_tests.yml b/.github/workflows/ci_tests.yml index b09c97c4..2c5e18a5 100644 --- a/.github/workflows/ci_tests.yml +++ b/.github/workflows/ci_tests.yml @@ -47,17 +47,13 @@ jobs: - name: "Check out the repository" uses: actions/checkout@v4 - - name: "Set up Python ${{ matrix.python-version }}" - uses: actions/setup-python@v5 + - name: "Set up Python & Hatch - ${{ matrix.python-version }}" + uses: ./.github/actions/setup-python-hatch with: python-version: "${{ matrix.python-version }}" - - name: "Install Hatch" - shell: bash - run: pip3 install hatch - - name: "Run Tests" - run: hatch run dev-env:pytest tests + run: hatch run test:unit - name: "Get current date" if: always() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3d55e0d5..746db253 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,6 +10,7 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v3.2.0 hooks: + - id: debug-statements - id: check-yaml args: [--unsafe] - id: end-of-file-fixer @@ -17,44 +18,26 @@ repos: exclude_types: - "markdown" - id: check-case-conflict -- repo: https://github.com/psf/black - rev: 22.3.0 +- repo: local hooks: - id: black - - id: black - alias: black-check - stages: [manual] - args: - - "--check" - - "--diff" -- repo: https://github.com/pycqa/flake8 - rev: 4.0.1 - hooks: + name: black + entry: hatch run lint:black + language: system + types: [python] + pass_filenames: false + verbose: true - id: flake8 - - id: flake8 - alias: flake8-check - stages: [manual] -# - repo: https://github.com/pre-commit/mirrors-mypy -# rev: v1.4.1 -# hooks: -# - id: mypy -# # N.B.: Mypy is... a bit fragile. -# # -# # By using `language: system` we run this hook in the local -# # environment instead of a pre-commit isolated one. This is needed -# # to ensure mypy correctly parses the project. - -# # It may cause trouble -# # in that it adds environmental variables out of our control to the -# # mix. Unfortunately, there's nothing we can do about per pre-commit's -# # author. -# # See https://github.com/pre-commit/pre-commit/issues/730 for details. 
-#     args: [--show-error-codes]
-#     files: ^dbt_common/
-#     language: system
-#   - id: mypy
-#     alias: mypy-check
-#     stages: [manual]
-#     args: [--show-error-codes, --pretty]
-#     files: ^dbt_common
-#     language: system
+    name: flake8
+    entry: hatch run lint:flake8
+    language: system
+    types: [python]
+    pass_filenames: false
+    verbose: true
+  - id: mypy
+    name: mypy
+    entry: hatch run lint:mypy
+    language: system
+    types: [python]
+    pass_filenames: false
+    verbose: true
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 9075e35b..690f7120 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -54,6 +54,7 @@ There are some tools that will be helpful to you in developing locally. While th

 These are the tools used in `dbt-common` development and testing:

+- [`hatch`](https://hatch.pypa.io/latest/) for project management
 - [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
 - [`black`](https://github.com/psf/black) for code formatting
 - [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
@@ -62,31 +63,15 @@ These are the tools used in `dbt-common` development and testing:

 A deep understanding of these tools in not required to effectively contribute to `dbt-common`, but we recommend checking out the attached documentation if you're interested in learning more about each one.

-#### Virtual environments
-
-We strongly recommend using virtual environments when developing code in `dbt-common`. We recommend creating this virtualenv
-in the root of the `dbt-common` repository. To create a new virtualenv, run:
-```sh
-python3 -m venv env
-source env/bin/activate
-```
-
-This will create and activate a new Python virtual environment.
-
 ## Running `dbt-common` in development

 ### Installation

-First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-common` (and its dependencies):
-
-```sh
-git
-pre-commit install
-```
+Ensure you have the latest version of pip (`pip install --upgrade pip`) and [hatch](https://hatch.pypa.io/latest/install/) installed.

 ### Running `dbt-common`

-This repository is just a template and cannot be run.
+This repository cannot be run on its own.

 ## Testing

@@ -98,29 +83,31 @@ Once you're able to manually test that your code change is working as expected,

 ### Initial setup

-None needed.
-
-### Test commands
+- [Install pre-commit](https://pre-commit.com/#usage)
+- [Install hatch](https://hatch.pypa.io/1.7/install/#pip)

-No tests included.
+- Nothing else is needed to set up your environments: hatch creates each environment as defined in `pyproject.toml` the first time you run a command.

+### Hatch Commands

-### Unit, Integration, Functional?
+See `pyproject.toml` for a complete list of custom commands. See the [hatch docs](https://hatch.pypa.io/latest/cli/reference/) for a description of built-in commands and flags.

-Here are some general rules for adding tests:
-* unit tests (`tests/unit`) don’t need to access a database; "pure Python" tests should be written as unit tests
-* functional tests (`tests/functional`) cover anything that interacts with a database, namely adapter
+Run `hatch env show` to view a list of all environments and all commands available within them.

-## Debugging
+Example uses:

-1. The logs for a `dbt run` have stack traces and other information for debugging errors (in `logs/dbt.log` in your project directory).
-2. Try using a debugger, like `ipdb`. For pytest: `--pdb --pdbcls=IPython.terminal.debugger:pdb`
-3.
+
+|Type|Command|Description|
+|---|---|---|
+|Utility|`hatch run proto`|Regenerate protobuf definitions|
+|Testing|`hatch run test:unit`|Run all unit tests|
+|Code Quality|`hatch run lint:all`|Run black, flake8, and mypy checks|
+|Code Quality|`hatch run lint:black`|Run black|
+|Shell|`hatch shell`|Drops you into the default shell with project + dev requirements installed. Use `exit` to leave the shell, _not_ `deactivate`.|
+|Shell|`hatch -e <environment> shell`|Drops you into a shell of the specified environment. Use `exit` to leave the shell, _not_ `deactivate`.|

 ### Assorted development tips
 * Append `# type: ignore` to the end of a line if you need to disable `mypy` on that line.
 * Sometimes flake8 complains about lines that are actually fine, in which case you can put a comment on the line such as: # noqa or # noqa: ANNN, where ANNN is the error code that flake8 issues.
-* To collect output for `CProfile`, run dbt with the `-r` option and the name of an output file, i.e. `dbt -r dbt.cprof run`. If you just want to profile parsing, you can do: `dbt -r dbt.cprof parse`. `pip` install `snakeviz` to view the output. Run `snakeviz dbt.cprof` and output will be rendered in a browser window.

 ## Adding or modifying a CHANGELOG Entry
diff --git a/Makefile b/Makefile
deleted file mode 100644
index dad020b0..00000000
--- a/Makefile
+++ /dev/null
@@ -1,32 +0,0 @@
-.DEFAULT_GOAL:=help
-
-
-.PHONY: run install-hatch overwrite-pre-commit install test lint json_schema
-
-run:
-	export FORMAT_JSON_LOGS="1"
-
-install-hatch:
-	pip3 install hatch
-
-# This edits your local pre-commit hook file to use Hatch when executing.
-overwrite-pre-commit:
-	hatch run dev-env:pre-commit install
-	hatch run dev-env:sed -i -e "s/exec /exec hatch run dev-env:/g" .git/hooks/pre-commit
-
-test:
-	export FORMAT_JSON_LOGS="1" && hatch -v run dev-env:pytest -n auto tests
-
-lint:
-	hatch run dev-env:pre-commit run --show-diff-on-failure --color=always --all-files
-
-.PHONY: proto_types
-proto_types:  ## generates google protobuf python file from types.proto
-	protoc -I=./dbt_common/events --python_out=./dbt_common/events ./dbt_common/events/types.proto
-
-.PHONY: help
-help:  ## Show this help message.
-	@echo 'usage: make [target]'
-	@echo
-	@echo 'targets:'
-	@grep -E '^[8+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/dbt_common/__about__.py b/dbt_common/__about__.py
index 9f7a875c..1663d823 100644
--- a/dbt_common/__about__.py
+++ b/dbt_common/__about__.py
@@ -1 +1 @@
-version = "0.1.0"
+version = "0.1.1"
diff --git a/dbt_common/clients/_jinja_blocks.py b/dbt_common/clients/_jinja_blocks.py
index c6058bfa..e4eeba85 100644
--- a/dbt_common/clients/_jinja_blocks.py
+++ b/dbt_common/clients/_jinja_blocks.py
@@ -28,7 +28,11 @@ def __init__(self, contents: str) -> None:

 class BlockTag:
     def __init__(
-        self, block_type_name: str, block_name: str, contents: Optional[str] = None, full_block: Optional[str] = None
+        self,
+        block_type_name: str,
+        block_name: str,
+        contents: Optional[str] = None,
+        full_block: Optional[str] = None,
     ) -> None:
         self.block_type_name = block_type_name
         self.block_name = block_name
@@ -106,7 +110,9 @@ def __init__(self, text: str) -> None:
         self.pos: int = 0

     def linepos(self, end: Optional[int] = None) -> str:
-        """Given an absolute position in the input text, return a pair of
+        """Return relative position in line.
+ + Given an absolute position in the input data, return a pair of line number + relative position to the start of the line. """ end_val: int = self.pos if end is None else end @@ -148,7 +154,9 @@ def _expect_match(self, expected_name: str, *patterns) -> re.Match: # type: ign return match def handle_expr(self, match: re.Match) -> None: - """Handle an expression. At this point we're at a string like: + """Handle an expression. + + At this point we're at a string like: {{ 1 + 2 }} ^ right here @@ -180,6 +188,7 @@ def handle_comment(self, match: re.Match) -> None: def _expect_block_close(self) -> None: """Search for the tag close marker. + To the right of the type name, there are a few possiblities: - a name (handled by the regex's 'block_name') - any number of: `=`, `(`, `)`, strings, etc (arguments) @@ -191,7 +200,9 @@ def _expect_block_close(self) -> None: are quote and `%}` - nothing else can hide the %} and be valid jinja. """ while True: - end_match = self._expect_match('tag close ("%}")', QUOTE_START_PATTERN, TAG_CLOSE_PATTERN) + end_match = self._expect_match( + 'tag close ("%}")', QUOTE_START_PATTERN, TAG_CLOSE_PATTERN + ) self.advance(end_match.end()) if end_match.groupdict().get("tag_close") is not None: return @@ -207,7 +218,9 @@ def handle_raw(self) -> int: return match.end() def handle_tag(self, match: re.Match) -> Tag: - """The tag could be one of a few things: + """Determine tag type. + + The tag could be one of a few things: {% mytag %} {% mytag x = y %} @@ -229,11 +242,15 @@ def handle_tag(self, match: re.Match) -> Tag: else: self.advance(match.end()) self._expect_block_close() - return Tag(block_type_name=block_type_name, block_name=block_name, start=start_pos, end=self.pos) + return Tag( + block_type_name=block_type_name, block_name=block_name, start=start_pos, end=self.pos + ) def find_tags(self) -> Iterator[Tag]: while True: - match = self._first_match(BLOCK_START_PATTERN, COMMENT_START_PATTERN, EXPR_START_PATTERN) + match = self._first_match( + BLOCK_START_PATTERN, COMMENT_START_PATTERN, EXPR_START_PATTERN + ) if match is None: break @@ -253,7 +270,8 @@ def find_tags(self) -> Iterator[Tag]: yield self.handle_tag(match) else: raise DbtInternalError( - "Invalid regex match in next_block, expected block start, " "expr start, or comment start" + "Invalid regex match in next_block, expected block start, " + "expr start, or comment start" ) def __iter__(self) -> Iterator[Tag]: @@ -349,4 +367,6 @@ def find_blocks( def lex_for_blocks( self, allowed_blocks: Optional[Set[str]] = None, collect_raw_data: bool = True ) -> List[Union[BlockData, BlockTag]]: - return list(self.find_blocks(allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data)) + return list( + self.find_blocks(allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data) + ) diff --git a/dbt_common/clients/agate_helper.py b/dbt_common/clients/agate_helper.py index 4f937c2c..3aade66d 100644 --- a/dbt_common/clients/agate_helper.py +++ b/dbt_common/clients/agate_helper.py @@ -1,13 +1,13 @@ from codecs import BOM_UTF8 -import agate +import agate # type: ignore import datetime import isodate import json from typing import Iterable, List, Dict, Union, Optional, Any from dbt_common.exceptions import DbtRuntimeError -from dbt_common.utils import ForgivingJSONEncoder +from dbt_common.utils.encoding import ForgivingJSONEncoder BOM = BOM_UTF8.decode("utf-8") # '\ufeff' @@ -17,7 +17,7 @@ def cast(self, d): # by default agate will cast none as a Number # but we need to cast it as an Integer to preserve # the type 
when merging and unioning tables
-    if type(d) == int or d is None:
+    if type(d) == int or d is None:  # noqa [E721]
         return d
     else:
         raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
@@ -30,7 +30,7 @@ class Number(agate.data_types.Number):
     # undo the change in https://github.com/wireservice/agate/pull/733
     # i.e. do not cast True and False to numeric 1 and 0
     def cast(self, d):
-        if type(d) == bool:
+        if type(d) == bool:  # noqa [E721]
             raise agate.exceptions.CastError("Do not cast True to 1 or False to 0.")
         else:
             return super().cast(d)
@@ -59,14 +59,15 @@ def cast(self, d):
 def build_type_tester(
     text_columns: Iterable[str], string_null_values: Optional[Iterable[str]] = ("null", "")
 ) -> agate.TypeTester:
-
     types = [
         Integer(null_values=("null", "")),
         Number(null_values=("null", "")),
         agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
         agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
         ISODateTime(null_values=("null", "")),
-        agate.data_types.Boolean(true_values=("true",), false_values=("false",), null_values=("null", "")),
+        agate.data_types.Boolean(
+            true_values=("true",), false_values=("false",), null_values=("null", "")
+        ),
         agate.data_types.Text(null_values=string_null_values),
     ]
     force = {k: agate.data_types.Text(null_values=string_null_values) for k in text_columns}
@@ -92,13 +93,13 @@ def table_from_rows(


 def table_from_data(data, column_names: Iterable[str]) -> agate.Table:
-    "Convert a list of dictionaries into an Agate table"
+    """Convert a list of dictionaries into an Agate table.

-    # The agate table is generated from a list of dicts, so the column order
-    # from `data` is not preserved. We can use `select` to reorder the columns
-    #
-    # If there is no data, create an empty table with the specified columns
+    The agate table is generated from a list of dicts, so the column order
+    from `data` is not preserved. We can use `select` to reorder the columns.
+
+    If there is no data, create an empty table with the specified columns.
+    """
     if len(data) == 0:
         return agate.Table([], column_names=column_names)
     else:
@@ -107,13 +108,13 @@ def table_from_data(data, column_names: Iterable[str]) -> agate.Table:


 def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table:
-    """
-    Convert a list of dictionaries into an Agate table. This method does not
+    """Convert a list of dictionaries into an Agate table.
+
+    This method does not
     coerce string values into more specific types (eg. '005' will not be
     coerced to '5'). Additionally, this method does not coerce values to
     None (eg. '' or 'null' will retain their string literal representations).
     """
-
     rows = []
     text_only_columns = set()
     for _row in data:
@@ -130,18 +131,21 @@

         rows.append(row)

-    return table_from_rows(rows=rows, column_names=column_names, text_only_columns=text_only_columns)
+    return table_from_rows(
+        rows=rows, column_names=column_names, text_only_columns=text_only_columns
+    )


 def empty_table():
-    "Returns an empty Agate table. To be used in place of None"
+    """Returns an empty Agate table.
+
+    To be used in place of None
+    """
     return agate.Table(rows=[])


 def as_matrix(table):
-    "Return an agate table as a matrix of data sans columns"
-
+    """Return an agate table as a matrix of data sans columns."""
     return [r.values() for r in table.rows.values()]
@@ -176,7 +180,8 @@ def __setitem__(self, key, value):
         elif isinstance(value, _NullMarker):
             # use the existing value
             return
-        # when one table column is Number while another is Integer, force the column to Number on merge
+        # when one table column is Number while another is Integer,
+        # force the column to Number on merge
         elif isinstance(value, Integer) and isinstance(existing_type, agate.data_types.Number):
             # use the existing value
             return
@@ -203,8 +208,11 @@ def finalize(self) -> Dict[str, agate.data_types.DataType]:


 def _merged_column_types(tables: List[agate.Table]) -> Dict[str, agate.data_types.DataType]:
-    # this is a lot like agate.Table.merge, but with handling for all-null
-    # rows being "any type".
+    """Custom version of agate.Table.merge.
+
+    this is a lot like agate.Table.merge, but with handling for all-null
+    rows being "any type".
+    """
     new_columns: ColumnTypeBuilder = ColumnTypeBuilder()
     for table in tables:
         for i in range(len(table.columns)):
@@ -219,8 +227,9 @@


 def merge_tables(tables: List[agate.Table]) -> agate.Table:
-    """This is similar to agate.Table.merge, but it handles rows of all 'null'
-    values more gracefully during merges.
+    """This is similar to agate.Table.merge.
+
+    This handles rows of all 'null' values more gracefully during merges.
     """
     new_columns = _merged_column_types(tables)
     column_names = tuple(new_columns.keys())
diff --git a/dbt_common/clients/jinja.py b/dbt_common/clients/jinja.py
index b8c0d03a..b04ffae9 100644
--- a/dbt_common/clients/jinja.py
+++ b/dbt_common/clients/jinja.py
@@ -9,14 +9,14 @@ from typing import Any, Callable, Dict, Iterator, List, Mapping, Optional, Union, Set, Type
 from typing_extensions import Protocol

-import jinja2
-import jinja2.ext
+import jinja2  # type: ignore
+import jinja2.ext  # type: ignore
 import jinja2.nativetypes  # type: ignore
-import jinja2.nodes
-import jinja2.parser
-import jinja2.sandbox
+import jinja2.nodes  # type: ignore
+import jinja2.parser  # type: ignore
+import jinja2.sandbox  # type: ignore

-from dbt_common.utils import (
+from dbt_common.utils.jinja import (
     get_dbt_macro_name,
     get_docs_macro_name,
     get_materialization_macro_name,
@@ -86,7 +86,13 @@ def _parse(self, source, name, filename):
         return MacroFuzzParser(self, source, name, filename).parse()

     def _compile(self, source, filename):
-        """Override jinja's compilation to stash the rendered source inside
+        """Override jinja's compilation.
+
+        Use to stash the rendered source inside
         the python linecache for debugging when the appropriate environment
         variable is set.

@@ -112,7 +118,10 @@ def new_context(
         # This custom override makes the assumption that the locals and shared
         # parameters are not used, so enforce that.
         if shared or locals:
-            raise Exception("The MacroFuzzTemplate.new_context() override cannot use the shared or locals parameters.")
+            raise Exception(
+                "The MacroFuzzTemplate.new_context() override cannot use the "
+                "shared or locals parameters."
+ ) parent = ChainMap(vars, self.globals) if self.globals else vars @@ -120,7 +129,9 @@ def new_context( def render(self, *args: Any, **kwargs: Any) -> Any: if kwargs or len(args) != 1: - raise Exception("The MacroFuzzTemplate.render() override requires exactly one argument.") + raise Exception( + "The MacroFuzzTemplate.render() override requires exactly one argument." + ) ctx = self.new_context(args[0]) @@ -140,16 +151,14 @@ class NativeSandboxEnvironment(MacroFuzzEnvironment): class TextMarker(str): - """A special native-env marker that indicates a value is text and is - not to be evaluated. Use this to prevent your numbery-strings from becoming - numbers! + """A special native-env marker that indicates a value is text and is not to be evaluated. + + Use this to prevent your numbery-strings from becoming numbers! """ class NativeMarker(str): - """A special native-env marker that indicates the field should be passed to - literal_eval. - """ + """A special native-env marker that indicates the field should be passed to literal_eval.""" class BoolMarker(NativeMarker): @@ -165,7 +174,9 @@ def _is_number(value) -> bool: def quoted_native_concat(nodes): - """This is almost native_concat from the NativeTemplate, except in the + """Handle special case for native_concat from the NativeTemplate. + + This is almost native_concat from the NativeTemplate, except in the special case of a single argument that is a quoted string and returns a string, the quotes are re-inserted. """ @@ -201,9 +212,10 @@ class NativeSandboxTemplate(jinja2.nativetypes.NativeTemplate): # mypy: ignore environment_class = NativeSandboxEnvironment # type: ignore def render(self, *args, **kwargs): - """Render the template to produce a native Python type. If the - result is a single node, its value is returned. Otherwise, the - nodes are concatenated as strings. If the result can be parsed + """Render the template to produce a native Python type. + + If the result is a single node, its value is returned. Otherwise, + the nodes are concatenated as strings. If the result can be parsed with :func:`ast.literal_eval`, the parsed value is returned. Otherwise, the string is returned. """ @@ -415,7 +427,9 @@ def __getitem__(self, name): def __getattr__(self, name): if name == "name" or _is_dunder_name(name): - raise AttributeError("'{}' object has no attribute '{}'".format(type(self).__name__, name)) + raise AttributeError( + "'{}' object has no attribute '{}'".format(type(self).__name__, name) + ) self.name = name @@ -463,7 +477,6 @@ def get_environment( args["extensions"].append(TestExtension) env_cls: Type[jinja2.Environment] - text_filter: Type if native: env_cls = NativeSandboxEnvironment filters = NATIVE_FILTERS @@ -520,8 +533,9 @@ def extract_toplevel_blocks( allowed_blocks: Optional[Set[str]] = None, collect_raw_data: bool = True, ) -> List[Union[BlockData, BlockTag]]: - """Extract the top-level blocks with matching block types from a jinja - file, with some special handling for block nesting. + """Extract the top-level blocks with matching block types from a jinja file. + + Includes some special handling for block nesting. :param data: The data to extract blocks from. :param allowed_blocks: The names of the blocks to extract from the file. @@ -535,4 +549,6 @@ def extract_toplevel_blocks( `collect_raw_data` is `True`) `BlockData` objects. 
""" tag_iterator = TagIterator(text) - return BlockIterator(tag_iterator).lex_for_blocks(allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data) + return BlockIterator(tag_iterator).lex_for_blocks( + allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data + ) diff --git a/dbt_common/clients/system.py b/dbt_common/clients/system.py index f637af68..c4b4e723 100644 --- a/dbt_common/clients/system.py +++ b/dbt_common/clients/system.py @@ -41,7 +41,8 @@ def find_matching( file_pattern: str, ignore_spec: Optional[PathSpec] = None, ) -> List[Dict[str, Any]]: - """ + """Return file info from paths and patterns. + Given an absolute `root_path`, a list of relative paths to that absolute root path (`relative_paths_to_search`), and a `file_pattern` like '*.sql', returns information about the files. For example: @@ -78,7 +79,9 @@ def find_matching( relative_path_to_root = os.path.join(relative_path_to_search, relative_path) modification_time = os.path.getmtime(absolute_path) - if reobj.match(local_file) and (not ignore_spec or not ignore_spec.match_file(relative_path_to_root)): + if reobj.match(local_file) and ( + not ignore_spec or not ignore_spec.match_file(relative_path_to_root) + ): matching.append( { "searched_path": relative_path_to_search, @@ -104,7 +107,8 @@ def load_file_contents(path: str, strip: bool = True) -> str: @functools.singledispatch def make_directory(path=None) -> None: - """ + """Handle directory creation with threading. + Make a directory and any intermediate directories that don't already exist. This function handles the case where two threads try to create a directory at once. @@ -133,7 +137,8 @@ def _(path: Path) -> None: def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool: - """ + """Make a file with `contents` at `path`. + Make a file at `path` assuming that the directory it resides in already exists. The file is saved with contents `contents` """ @@ -147,9 +152,7 @@ def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool: def make_symlink(source: str, link_path: str) -> None: - """ - Create a symlink at `link_path` referring to `source`. - """ + """Create a symlink at `link_path` referring to `source`.""" if not supports_symlinks(): # TODO: why not import these at top? raise dbt_common.exceptions.SymbolicLinkError() @@ -209,9 +212,7 @@ def _windows_rmdir_readonly(func: Callable[[str], Any], path: str, exc: Tuple[An def resolve_path_from_base(path_to_resolve: str, base_path: str) -> str: - """ - If path_to_resolve is a relative path, create an absolute path - with base_path as the base. + """If path_to_resolve is a relative path, create an absolute path with base_path as the base. If path_to_resolve is an absolute path or a user path (~), just resolve it to an absolute path and return. @@ -220,8 +221,9 @@ def resolve_path_from_base(path_to_resolve: str, base_path: str) -> str: def rmdir(path: str) -> None: - """ - Recursively deletes a directory. Includes an error handler to retry with + """Recursively deletes a directory. + + Includes an error handler to retry with different permissions on Windows. Otherwise, removing directories (eg. cloned via git) can cause rmtree to throw a PermissionError exception """ @@ -235,9 +237,7 @@ def rmdir(path: str) -> None: def _win_prepare_path(path: str) -> str: - """Given a windows path, prepare it for use by making sure it is absolute - and normalized. 
- """ + """Given a windows path, prepare it for use by making sure it is absolute and normalized.""" path = os.path.normpath(path) # if a path starts with '\', splitdrive() on it will return '' for the @@ -281,7 +281,9 @@ def _supports_long_paths() -> bool: def convert_path(path: str) -> str: - """Convert a path that dbt has, which might be >260 characters long, to one + """Handle path length for windows. + + Convert a path that dbt has, which might be >260 characters long, to one that will be writable/readable on Windows. On other platforms, this is a no-op. @@ -387,14 +389,18 @@ def _handle_windows_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: cls: Type[dbt_common.exceptions.DbtBaseException] = dbt_common.exceptions.base.CommandError if exc.errno == errno.ENOENT: message = ( - "Could not find command, ensure it is in the user's PATH " "and that the user has permissions to run it" + "Could not find command, ensure it is in the user's PATH " + "and that the user has permissions to run it" ) cls = dbt_common.exceptions.ExecutableError elif exc.errno == errno.ENOEXEC: message = "Command was not executable, ensure it is valid" cls = dbt_common.exceptions.ExecutableError elif exc.errno == errno.ENOTDIR: - message = "Unable to cd: path does not exist, user does not have" " permissions, or not a directory" + message = ( + "Unable to cd: path does not exist, user does not have" + " permissions, or not a directory" + ) cls = dbt_common.exceptions.WorkingDirectoryError else: message = 'Unknown error: {} (errno={}: "{}")'.format( @@ -415,7 +421,9 @@ def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: _handle_posix_error(exc, cwd, cmd) # this should not be reachable, raise _something_ at least! - raise dbt_common.exceptions.DbtInternalError("Unhandled exception in _interpret_oserror: {}".format(exc)) + raise dbt_common.exceptions.DbtInternalError( + "Unhandled exception in _interpret_oserror: {}".format(exc) + ) def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> Tuple[bytes, bytes]: @@ -434,7 +442,9 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T exe_pth = shutil.which(cmd[0]) if exe_pth: cmd = [os.path.abspath(exe_pth)] + list(cmd[1:]) - proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=full_env) + proc = subprocess.Popen( + cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=full_env + ) out, err = proc.communicate() except OSError as exc: @@ -450,7 +460,9 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T return out, err -def download_with_retries(url: str, path: str, timeout: Optional[Union[float, tuple]] = None) -> None: +def download_with_retries( + url: str, path: str, timeout: Optional[Union[float, tuple]] = None +) -> None: download_fn = functools.partial(download, url, path, timeout) connection_exception_retry(download_fn, 5) @@ -496,6 +508,7 @@ def untar_package(tar_path: str, dest_dir: str, rename_to: Optional[str] = None) def chmod_and_retry(func, path, exc_info): """Define an error handler to pass to shutil.rmtree. + On Windows, when a file is marked read-only as git likes to do, rmtree will fail. To handle that, on errors try to make the file writable. 
We want to retry most operations here, but listdir is one that we know will @@ -513,7 +526,9 @@ def _absnorm(path): def move(src, dst): - """A re-implementation of shutil.move that properly removes the source + """A re-implementation of shutil.move for windows fun. + + A re-implementation of shutil.move that properly removes the source directory on windows when it has read-only files in it and the move is between two drives. @@ -541,7 +556,9 @@ def move(src, dst): if os.path.isdir(src): if _absnorm(dst + "\\").startswith(_absnorm(src + "\\")): # dst is inside src - raise EnvironmentError("Cannot move a directory '{}' into itself '{}'".format(src, dst)) + raise EnvironmentError( + "Cannot move a directory '{}' into itself '{}'".format(src, dst) + ) shutil.copytree(src, dst, symlinks=True) rmtree(src) else: @@ -550,8 +567,9 @@ def move(src, dst): def rmtree(path): - """Recursively remove the path. On permissions errors on windows, try to remove - the read-only flag and try again. + """Recursively remove the path. + + On permissions errors on windows, try to remove the read-only flag and try again. """ path = convert_path(path) return shutil.rmtree(path, onerror=chmod_and_retry) diff --git a/dbt_common/contracts/config/base.py b/dbt_common/contracts/config/base.py index 899c7643..a16b4d9b 100644 --- a/dbt_common/contracts/config/base.py +++ b/dbt_common/contracts/config/base.py @@ -38,7 +38,9 @@ def __setitem__(self, key, value): def __delitem__(self, key): if hasattr(self, key): - msg = ('Error, tried to delete config key "{}": Cannot delete ' "built-in keys").format(key) + msg = ( + 'Error, tried to delete config key "{}": Cannot delete ' "built-in keys" + ).format(key) raise CompilationError(msg) else: del self._extra[key] @@ -110,7 +112,9 @@ def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> boo @classmethod def _merge_dicts(cls, src: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, Any]: - """Find all the items in data that match a target_field on this class, + """Mutate input to return merge results. + + Find all the items in data that match a target_field on this class, and merge them with the data found in `src` for target_field, using the field's specified merge behavior. Matching items will be removed from `data` (but _not_ `src`!). @@ -141,8 +145,12 @@ def _merge_dicts(cls, src: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, An ) return result - def update_from(self: T, data: Dict[str, Any], config_cls: Type[BaseConfig], validate: bool = True) -> T: - """Given a dict of keys, update the current config from them, validate + def update_from( + self: T, data: Dict[str, Any], config_cls: Type[BaseConfig], validate: bool = True + ) -> T: + """Update and validate config given a dict. + + Given a dict of keys, update the current config from them, validate it, and return a new config with the updated values """ dct = self.to_dict(omit_none=False) diff --git a/dbt_common/contracts/constraints.py b/dbt_common/contracts/constraints.py index ce3d1513..c01ee6f8 100644 --- a/dbt_common/contracts/constraints.py +++ b/dbt_common/contracts/constraints.py @@ -30,8 +30,12 @@ class ColumnLevelConstraint(dbtClassMixin): # It could be a predicate (check type), or a sequence sql keywords (e.g. unique type), # so the vague naming of 'expression' is intended to capture this range. 
expression: Optional[str] = None - warn_unenforced: bool = True # Warn if constraint cannot be enforced by platform but will be in DDL - warn_unsupported: bool = True # Warn if constraint is not supported by the platform and won't be in DDL + warn_unenforced: bool = ( + True # Warn if constraint cannot be enforced by platform but will be in DDL + ) + warn_unsupported: bool = ( + True # Warn if constraint is not supported by the platform and won't be in DDL + ) @dataclass diff --git a/dbt_common/dataclass_schema.py b/dbt_common/dataclass_schema.py index d718604b..a6d2b069 100644 --- a/dbt_common/dataclass_schema.py +++ b/dbt_common/dataclass_schema.py @@ -47,7 +47,9 @@ class dbtMashConfig(MashBaseConfig): # This class pulls in DataClassDictMixin from Mashumaro. The 'to_dict' # and 'from_dict' methods come from Mashumaro. class dbtClassMixin(DataClassDictMixin): - """The Mixin adds methods to generate a JSON schema and + """Convert and validate JSON schemas. + + The Mixin adds methods to generate a JSON schema and convert to and from JSON encodable dicts with validation against the schema """ diff --git a/dbt_common/events/__init__.py b/dbt_common/events/__init__.py index b200d081..6ba789b1 100644 --- a/dbt_common/events/__init__.py +++ b/dbt_common/events/__init__.py @@ -4,4 +4,6 @@ from dbt_common.events.logger import LineFormat # make sure event manager starts with a logger -get_event_manager().add_logger(get_stdout_config(LineFormat.PlainText, True, EventLevel.INFO, False)) +get_event_manager().add_logger( + get_stdout_config(LineFormat.PlainText, True, EventLevel.INFO, False) +) diff --git a/dbt_common/events/base_types.py b/dbt_common/events/base_types.py index 98b15738..2a90e78f 100644 --- a/dbt_common/events/base_types.py +++ b/dbt_common/events/base_types.py @@ -49,7 +49,7 @@ class EventLevel(str, Enum): class BaseEvent: - """BaseEvent for proto message generated python events""" + """BaseEvent for proto message generated python events.""" PROTO_TYPES_MODULE = types_pb2 @@ -59,7 +59,9 @@ def __init__(self, *args, **kwargs) -> None: if class_name == "Formatting" and len(args) > 0: kwargs["msg"] = args[0] args = () - assert len(args) == 0, f"[{class_name}] Don't use positional arguments when constructing logging events" + assert ( + len(args) == 0 + ), f"[{class_name}] Don't use positional arguments when constructing logging events" if "base_msg" in kwargs: kwargs["base_msg"] = str(kwargs["base_msg"]) if "msg" in kwargs: @@ -92,7 +94,9 @@ def __getattr__(self, key): return super().__getattribute__("pb_msg").__getattribute__(key) def to_dict(self): - return MessageToDict(self.pb_msg, preserving_proto_field_name=True, including_default_value_fields=True) + return MessageToDict( + self.pb_msg, preserving_proto_field_name=True, including_default_value_fields=True + ) def to_json(self) -> str: return MessageToJson( @@ -125,7 +129,6 @@ class EventMsg(Protocol): def msg_from_base_event(event: BaseEvent, level: Optional[EventLevel] = None): - msg_class_name = f"{type(event).__name__}Msg" msg_cls = getattr(event.PROTO_TYPES_MODULE, msg_class_name) diff --git a/dbt_common/events/event_handler.py b/dbt_common/events/event_handler.py index 58e23a13..d49daabb 100644 --- a/dbt_common/events/event_handler.py +++ b/dbt_common/events/event_handler.py @@ -17,7 +17,8 @@ class DbtEventLoggingHandler(logging.Handler): - """A logging handler that wraps the EventManager + """A logging handler that wraps the EventManager. + This allows non-dbt packages to log to the dbt event stream. 
     All logs are generated as "Note" events.
     """
diff --git a/dbt_common/events/event_manager.py b/dbt_common/events/event_manager.py
index c41b0983..96e61f6b 100644
--- a/dbt_common/events/event_manager.py
+++ b/dbt_common/events/event_manager.py
@@ -20,7 +20,8 @@ def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
             msg.SerializeToString()
         except Exception as exc:
             raise Exception(
-                f"{msg.info.name} is not serializable to binary. Originating exception: {exc}, {traceback.format_exc()}"
+                f"{msg.info.name} is not serializable to binary. "
+                f"Originating exception: {exc}, {traceback.format_exc()}"
             )

         for logger in self.loggers:
@@ -31,7 +32,9 @@
             callback(msg)

     def add_logger(self, config: LoggerConfig) -> None:
-        logger = _JsonLogger(config) if config.line_format == LineFormat.Json else _TextLogger(config)
+        logger = (
+            _JsonLogger(config) if config.line_format == LineFormat.Json else _TextLogger(config)
+        )
         self.loggers.append(logger)

     def flush(self) -> None:
diff --git a/dbt_common/events/format.py b/dbt_common/events/format.py
index f87e464b..fd32c129 100644
--- a/dbt_common/events/format.py
+++ b/dbt_common/events/format.py
@@ -30,7 +30,9 @@ def format_fancy_output_line(
     else:
         status_time = " in {execution_time:0.2f}s".format(execution_time=execution_time)

-    output = "{justified} [{status}{status_time}]".format(justified=justified, status=status, status_time=status_time)
+    output = "{justified} [{status}{status_time}]".format(
+        justified=justified, status=status, status_time=status_time
+    )

     return output
diff --git a/dbt_common/events/functions.py b/dbt_common/events/functions.py
index fde2219b..fe29a543 100644
--- a/dbt_common/events/functions.py
+++ b/dbt_common/events/functions.py
@@ -3,7 +3,7 @@
 from dbt_common.events.event_manager_client import get_event_manager
 from dbt_common.invocation import get_invocation_id
 from dbt_common.helper_types import WarnErrorOptions
-from dbt_common.utils import ForgivingJSONEncoder
+from dbt_common.utils.encoding import ForgivingJSONEncoder
 from dbt_common.events.base_types import BaseEvent, EventLevel, EventMsg
 from dbt_common.events.logger import LoggerConfig, LineFormat
 from dbt_common.exceptions import scrub_secrets, env_secrets
@@ -94,20 +94,27 @@ def msg_to_dict(msg: EventMsg) -> dict:
     msg_dict = dict()
     try:
         msg_dict = MessageToDict(
-            msg, preserving_proto_field_name=True, including_default_value_fields=True  # type: ignore
+            msg,
+            preserving_proto_field_name=True,
+            including_default_value_fields=True,  # type: ignore
         )
     except Exception as exc:
         event_type = type(msg).__name__
-        fire_event(Note(msg=f"type {event_type} is not serializable. {str(exc)}"), level=EventLevel.WARN)
+        fire_event(
+            Note(msg=f"type {event_type} is not serializable. 
{str(exc)}"), level=EventLevel.WARN + ) # We don't want an empty NodeInfo in output - if "data" in msg_dict and "node_info" in msg_dict["data"] and msg_dict["data"]["node_info"]["node_name"] == "": + if ( + "data" in msg_dict + and "node_info" in msg_dict["data"] + and msg_dict["data"]["node_info"]["node_name"] == "" + ): del msg_dict["data"]["node_info"] return msg_dict def warn_or_error(event, node=None) -> None: if WARN_ERROR or WARN_ERROR_OPTIONS.includes(type(event).__name__): - # TODO: resolve this circular import when at top from dbt_common.exceptions import EventCompilationError @@ -118,13 +125,17 @@ def warn_or_error(event, node=None) -> None: # an alternative to fire_event which only creates and logs the event value # if the condition is met. Does nothing otherwise. -def fire_event_if(conditional: bool, lazy_e: Callable[[], BaseEvent], level: Optional[EventLevel] = None) -> None: +def fire_event_if( + conditional: bool, lazy_e: Callable[[], BaseEvent], level: Optional[EventLevel] = None +) -> None: if conditional: fire_event(lazy_e(), level=level) # a special case of fire_event_if, to only fire events in our unit/functional tests -def fire_event_if_test(lazy_e: Callable[[], BaseEvent], level: Optional[EventLevel] = None) -> None: +def fire_event_if_test( + lazy_e: Callable[[], BaseEvent], level: Optional[EventLevel] = None +) -> None: fire_event_if(conditional=("pytest" in sys.modules), lazy_e=lazy_e, level=level) @@ -140,7 +151,9 @@ def get_metadata_vars() -> Dict[str, str]: global metadata_vars if metadata_vars is None: metadata_vars = { - k[len(_METADATA_ENV_PREFIX) :]: v for k, v in os.environ.items() if k.startswith(_METADATA_ENV_PREFIX) + k[len(_METADATA_ENV_PREFIX) :]: v + for k, v in os.environ.items() + if k.startswith(_METADATA_ENV_PREFIX) } return metadata_vars diff --git a/dbt_common/events/logger.py b/dbt_common/events/logger.py index ece7f283..f7a0a139 100644 --- a/dbt_common/events/logger.py +++ b/dbt_common/events/logger.py @@ -11,7 +11,7 @@ from dbt_common.events.base_types import EventLevel, EventMsg from dbt_common.events.format import timestamp_to_datetime_string -from dbt_common.utils import ForgivingJSONEncoder +from dbt_common.utils.encoding import ForgivingJSONEncoder # A Filter is a function which takes a BaseEvent and returns True if the event # should be logged, False otherwise. @@ -51,19 +51,21 @@ class LineFormat(Enum): # We need this function for now because the numeric log severity levels in # Python do not match those for logbook, so we have to explicitly call the # correct function by name. 
-def send_to_logger(l, level: str, log_line: str): +def send_to_logger(logger, level: str, log_line: str): if level == "test": - l.debug(log_line) + logger.debug(log_line) elif level == "debug": - l.debug(log_line) + logger.debug(log_line) elif level == "info": - l.info(log_line) + logger.info(log_line) elif level == "warn": - l.warning(log_line) + logger.warning(log_line) elif level == "error": - l.error(log_line) + logger.error(log_line) else: - raise AssertionError(f"While attempting to log {log_line}, encountered the unhandled level: {level}") + raise AssertionError( + f"While attempting to log {log_line}, encountered the unhandled level: {level}" + ) @dataclass @@ -150,7 +152,9 @@ def create_debug_line(self, msg: EventMsg) -> str: log_line = f"\n\n{separator} {ts} | {self.invocation_id} {separator}\n" scrubbed_msg: str = self.scrubber(msg.info.msg) # type: ignore level = msg.info.level - log_line += f"{self._get_color_tag()}{ts} [{level:<5}]{self._get_thread_name()} {scrubbed_msg}" + log_line += ( + f"{self._get_color_tag()}{ts} [{level:<5}]{self._get_thread_name()} {scrubbed_msg}" + ) return log_line def _get_color_tag(self) -> str: diff --git a/dbt_common/events/types.py b/dbt_common/events/types.py index 0ee5cd00..3c4b02e0 100644 --- a/dbt_common/events/types.py +++ b/dbt_common/events/types.py @@ -113,9 +113,12 @@ def message(self) -> str: class Note(InfoLevel): - """The Note event provides a way to log messages which aren't likely to be + """Unstructured events. + + The Note event provides a way to log messages which aren't likely to be useful as more structured events. For console formatting text like empty - lines and separator bars, use the Formatting event instead.""" + lines and separator bars, use the Formatting event instead. + """ def code(self) -> str: return "Z050" diff --git a/dbt_common/events/types_pb2.py b/dbt_common/events/types_pb2.py index babf7158..0bf1c693 100644 --- a/dbt_common/events/types_pb2.py +++ b/dbt_common/events/types_pb2.py @@ -2,10 +2,10 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: types.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -16,54 +16,53 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0btypes.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\"\x91\x02\n\tEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x05\x65xtra\x18\t \x03(\x0b\x32!.proto_types.EventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"6\n\x0eGenericMessage\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\"1\n\x11RetryExternalCall\x12\x0f\n\x07\x61ttempt\x18\x01 \x01(\x05\x12\x0b\n\x03max\x18\x02 \x01(\x05\"j\n\x14RetryExternalCallMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.RetryExternalCall\"#\n\x14RecordRetryException\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"p\n\x17RecordRetryExceptionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.RecordRetryException\"@\n\x13SystemCouldNotWrite\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x0b\n\x03\x65xc\x18\x03 \x01(\t\"n\n\x16SystemCouldNotWriteMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.SystemCouldNotWrite\"!\n\x12SystemExecutingCmd\x12\x0b\n\x03\x63md\x18\x01 \x03(\t\"l\n\x15SystemExecutingCmdMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.SystemExecutingCmd\"\x1c\n\x0cSystemStdOut\x12\x0c\n\x04\x62msg\x18\x01 \x01(\t\"`\n\x0fSystemStdOutMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SystemStdOut\"\x1c\n\x0cSystemStdErr\x12\x0c\n\x04\x62msg\x18\x01 \x01(\t\"`\n\x0fSystemStdErrMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SystemStdErr\",\n\x16SystemReportReturnCode\x12\x12\n\nreturncode\x18\x01 \x01(\x05\"t\n\x19SystemReportReturnCodeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.SystemReportReturnCode\"\x19\n\nFormatting\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\\\n\rFormattingMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.Formatting\"\x13\n\x04Note\x12\x0b\n\x03msg\x18\x01 \x01(\t\"P\n\x07NoteMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x1f\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x11.proto_types.Noteb\x06proto3') -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'types_pb2', _globals) 
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'types_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _EVENTINFO_EXTRAENTRY._options = None _EVENTINFO_EXTRAENTRY._serialized_options = b'8\001' - _globals['_EVENTINFO']._serialized_start=62 - _globals['_EVENTINFO']._serialized_end=335 - _globals['_EVENTINFO_EXTRAENTRY']._serialized_start=291 - _globals['_EVENTINFO_EXTRAENTRY']._serialized_end=335 - _globals['_GENERICMESSAGE']._serialized_start=337 - _globals['_GENERICMESSAGE']._serialized_end=391 - _globals['_RETRYEXTERNALCALL']._serialized_start=393 - _globals['_RETRYEXTERNALCALL']._serialized_end=442 - _globals['_RETRYEXTERNALCALLMSG']._serialized_start=444 - _globals['_RETRYEXTERNALCALLMSG']._serialized_end=550 - _globals['_RECORDRETRYEXCEPTION']._serialized_start=552 - _globals['_RECORDRETRYEXCEPTION']._serialized_end=587 - _globals['_RECORDRETRYEXCEPTIONMSG']._serialized_start=589 - _globals['_RECORDRETRYEXCEPTIONMSG']._serialized_end=701 - _globals['_SYSTEMCOULDNOTWRITE']._serialized_start=703 - _globals['_SYSTEMCOULDNOTWRITE']._serialized_end=767 - _globals['_SYSTEMCOULDNOTWRITEMSG']._serialized_start=769 - _globals['_SYSTEMCOULDNOTWRITEMSG']._serialized_end=879 - _globals['_SYSTEMEXECUTINGCMD']._serialized_start=881 - _globals['_SYSTEMEXECUTINGCMD']._serialized_end=914 - _globals['_SYSTEMEXECUTINGCMDMSG']._serialized_start=916 - _globals['_SYSTEMEXECUTINGCMDMSG']._serialized_end=1024 - _globals['_SYSTEMSTDOUT']._serialized_start=1026 - _globals['_SYSTEMSTDOUT']._serialized_end=1054 - _globals['_SYSTEMSTDOUTMSG']._serialized_start=1056 - _globals['_SYSTEMSTDOUTMSG']._serialized_end=1152 - _globals['_SYSTEMSTDERR']._serialized_start=1154 - _globals['_SYSTEMSTDERR']._serialized_end=1182 - _globals['_SYSTEMSTDERRMSG']._serialized_start=1184 - _globals['_SYSTEMSTDERRMSG']._serialized_end=1280 - _globals['_SYSTEMREPORTRETURNCODE']._serialized_start=1282 - _globals['_SYSTEMREPORTRETURNCODE']._serialized_end=1326 - _globals['_SYSTEMREPORTRETURNCODEMSG']._serialized_start=1328 - _globals['_SYSTEMREPORTRETURNCODEMSG']._serialized_end=1444 - _globals['_FORMATTING']._serialized_start=1446 - _globals['_FORMATTING']._serialized_end=1471 - _globals['_FORMATTINGMSG']._serialized_start=1473 - _globals['_FORMATTINGMSG']._serialized_end=1565 - _globals['_NOTE']._serialized_start=1567 - _globals['_NOTE']._serialized_end=1586 - _globals['_NOTEMSG']._serialized_start=1588 - _globals['_NOTEMSG']._serialized_end=1668 + _EVENTINFO._serialized_start=62 + _EVENTINFO._serialized_end=335 + _EVENTINFO_EXTRAENTRY._serialized_start=291 + _EVENTINFO_EXTRAENTRY._serialized_end=335 + _GENERICMESSAGE._serialized_start=337 + _GENERICMESSAGE._serialized_end=391 + _RETRYEXTERNALCALL._serialized_start=393 + _RETRYEXTERNALCALL._serialized_end=442 + _RETRYEXTERNALCALLMSG._serialized_start=444 + _RETRYEXTERNALCALLMSG._serialized_end=550 + _RECORDRETRYEXCEPTION._serialized_start=552 + _RECORDRETRYEXCEPTION._serialized_end=587 + _RECORDRETRYEXCEPTIONMSG._serialized_start=589 + _RECORDRETRYEXCEPTIONMSG._serialized_end=701 + _SYSTEMCOULDNOTWRITE._serialized_start=703 + _SYSTEMCOULDNOTWRITE._serialized_end=767 + _SYSTEMCOULDNOTWRITEMSG._serialized_start=769 + _SYSTEMCOULDNOTWRITEMSG._serialized_end=879 + _SYSTEMEXECUTINGCMD._serialized_start=881 + _SYSTEMEXECUTINGCMD._serialized_end=914 + _SYSTEMEXECUTINGCMDMSG._serialized_start=916 + _SYSTEMEXECUTINGCMDMSG._serialized_end=1024 + _SYSTEMSTDOUT._serialized_start=1026 + 
_SYSTEMSTDOUT._serialized_end=1054 + _SYSTEMSTDOUTMSG._serialized_start=1056 + _SYSTEMSTDOUTMSG._serialized_end=1152 + _SYSTEMSTDERR._serialized_start=1154 + _SYSTEMSTDERR._serialized_end=1182 + _SYSTEMSTDERRMSG._serialized_start=1184 + _SYSTEMSTDERRMSG._serialized_end=1280 + _SYSTEMREPORTRETURNCODE._serialized_start=1282 + _SYSTEMREPORTRETURNCODE._serialized_end=1326 + _SYSTEMREPORTRETURNCODEMSG._serialized_start=1328 + _SYSTEMREPORTRETURNCODEMSG._serialized_end=1444 + _FORMATTING._serialized_start=1446 + _FORMATTING._serialized_end=1471 + _FORMATTINGMSG._serialized_start=1473 + _FORMATTINGMSG._serialized_end=1565 + _NOTE._serialized_start=1567 + _NOTE._serialized_end=1586 + _NOTEMSG._serialized_start=1588 + _NOTEMSG._serialized_end=1668 # @@protoc_insertion_point(module_scope) diff --git a/dbt_common/exceptions/base.py b/dbt_common/exceptions/base.py index 0ec53d2a..db619326 100644 --- a/dbt_common/exceptions/base.py +++ b/dbt_common/exceptions/base.py @@ -92,9 +92,7 @@ def type(self): return "Runtime" def node_to_string(self, node: Any): - """ - Given a node-like object we attempt to create the best identifier we can - """ + """Given a node-like object we attempt to create the best identifier we can.""" result = "" if hasattr(node, "resource_type"): result += node.resource_type @@ -125,8 +123,9 @@ def process_stack(self): return lines def validator_error_message(self, exc: builtins.Exception): - """Given a dbt.dataclass_schema.ValidationError (which is basically a - jsonschema.ValidationError), return the relevant parts as a string + """Given a dbt.dataclass_schema.ValidationError return the relevant parts as a string. + + dbt.dataclass_schema.ValidationError is basically a jsonschema.ValidationError) """ if not isinstance(exc, ValidationError): return str(exc) @@ -175,9 +174,14 @@ def type(self): def _fix_dupe_msg(self, path_1: str, path_2: str, name: str, type_name: str) -> str: if path_1 == path_2: - return f"remove one of the {type_name} entries for {name} in this file:\n - {path_1!s}\n" + return ( + f"remove one of the {type_name} entries for {name} in this file:\n - {path_1!s}\n" + ) else: - return f"remove the {type_name} entry for {name} in one of these files:\n" f" - {path_1!s}\n{path_2!s}" + return ( + f"remove the {type_name} entry for {name} in one of these files:\n" + f" - {path_1!s}\n{path_2!s}" + ) class RecursionError(DbtRuntimeError): diff --git a/dbt_common/exceptions/cache.py b/dbt_common/exceptions/cache.py index 6dc21539..73bdf3f7 100644 --- a/dbt_common/exceptions/cache.py +++ b/dbt_common/exceptions/cache.py @@ -15,7 +15,9 @@ class NewNameAlreadyInCacheError(CacheInconsistencyError): def __init__(self, old_key: str, new_key: str): self.old_key = old_key self.new_key = new_key - msg = f'in rename of "{self.old_key}" -> "{self.new_key}", new name is in the cache already' + msg = ( + f'in rename of "{self.old_key}" -> "{self.new_key}", new name is in the cache already' + ) super().__init__(msg) @@ -55,7 +57,10 @@ def get_message(self) -> str: else: message_addendum = "" - msg = f"in rename, new key {self.new_key} already in cache: {list(self.relations.keys())}{message_addendum}" + msg = ( + f"in rename, new key {self.new_key} already in " + f"cache: {list(self.relations.keys())}{message_addendum}" + ) return msg diff --git a/dbt_common/exceptions/connection.py b/dbt_common/exceptions/connection.py index 2638f32b..85d37735 100644 --- a/dbt_common/exceptions/connection.py +++ b/dbt_common/exceptions/connection.py @@ -1,7 +1,8 @@ class ConnectionError(Exception): - """ 
-    There was a problem with the connection that returned a bad response,
-    timed out, or resulted in a file that is corrupt.
+    """ConnectionError.
+
+    There was a problem with the connection: it returned a bad response,
+    timed out, or resulted in a file that is corrupt.
     """

     pass
diff --git a/dbt_common/exceptions/jinja.py b/dbt_common/exceptions/jinja.py
index f689b2ad..8edfd87a 100644
--- a/dbt_common/exceptions/jinja.py
+++ b/dbt_common/exceptions/jinja.py
@@ -24,7 +24,8 @@ def __init__(self, block_type_name: str, linecount: int) -> None:

     def get_message(self) -> str:
         msg = (
-            f"Reached EOF without finding a close tag for {self.block_type_name} (searched from line {self.linecount})"
+            "Reached EOF without finding a close tag for "
+            f"{self.block_type_name} (searched from line {self.linecount})"
         )
         return msg
diff --git a/dbt_common/exceptions/macros.py b/dbt_common/exceptions/macros.py
index 5fbefce3..34a7672e 100644
--- a/dbt_common/exceptions/macros.py
+++ b/dbt_common/exceptions/macros.py
@@ -4,10 +4,9 @@


 class MacroReturn(DbtBaseException):
-    """
+    """This is how we return a value from a macro, not an exception.
+
     Hack of all hacks
-    This is not actually an exception.
-    It's how we return a value from a macro.
     """

     def __init__(self, value) -> None:
@@ -66,7 +65,10 @@ def __init__(self, kwarg_value) -> None:
         super().__init__(msg=self.get_message())

     def get_message(self) -> str:
-        msg = f"The macro_name parameter ({self.kwarg_value}) " "to adapter.dispatch was not a string"
+        msg = (
+            f"The macro_name parameter ({self.kwarg_value}) "
+            "to adapter.dispatch was not a string"
+        )
         return msg


@@ -102,6 +104,9 @@ def __init__(self, freshness_macro_name: str, table):
         super().__init__(msg=self.get_message())

     def get_message(self) -> str:
-        msg = f'Got an invalid result from "{self.freshness_macro_name}" macro: {[tuple(r) for r in self.table]}'
+        msg = (
+            f'Got an invalid result from "{self.freshness_macro_name}" '
+            f"macro: {[tuple(r) for r in self.table]}"
+        )

         return msg
diff --git a/dbt_common/helper_types.py b/dbt_common/helper_types.py
index d8631f38..b62c6595 100644
--- a/dbt_common/helper_types.py
+++ b/dbt_common/helper_types.py
@@ -25,7 +25,7 @@ def __eq__(self, other):

 @dataclass
 class NoValue(dbtClassMixin):
-    """Sometimes, you want a way to say none that isn't None"""
+    """Sometimes, you want a way to say none that isn't None!"""

     novalue: NVEnum = field(default_factory=lambda: NVEnum.novalue)

@@ -39,10 +39,14 @@ class IncludeExclude(dbtClassMixin):

     def __post_init__(self):
         if isinstance(self.include, str) and self.include not in self.INCLUDE_ALL:
-            raise ValidationError(f"include must be one of {self.INCLUDE_ALL} or a list of strings")
+            raise ValidationError(
+                f"include must be one of {self.INCLUDE_ALL} or a list of strings"
+            )

         if self.exclude and self.include not in self.INCLUDE_ALL:
-            raise ValidationError(f"exclude can only be specified if include is one of {self.INCLUDE_ALL}")
+            raise ValidationError(
+                f"exclude can only be specified if include is one of {self.INCLUDE_ALL}"
+            )

         if isinstance(self.include, list):
             self._validate_items(self.include)
@@ -51,7 +55,9 @@ def __post_init__(self):
             self._validate_items(self.exclude)

     def includes(self, item_name: str):
-        return (item_name in self.include or self.include in self.INCLUDE_ALL) and item_name not in self.exclude
+        return (
+            item_name in self.include or self.include in self.INCLUDE_ALL
+        ) and item_name not in self.exclude

     def _validate_items(self, items: List[str]):
         pass
diff --git a/dbt_common/semver.py b/dbt_common/semver.py
index
64620c53..951f4e8e 100644 --- a/dbt_common/semver.py +++ b/dbt_common/semver.py @@ -87,14 +87,18 @@ def to_version_string(self, skip_matcher=False): if not skip_matcher: matcher = self.matcher - return "{}{}.{}.{}{}{}".format(matcher, self.major, self.minor, self.patch, prerelease, build) + return "{}{}.{}.{}{}{}".format( + matcher, self.major, self.minor, self.patch, prerelease, build + ) @classmethod def from_version_string(cls, version_string): match = _VERSION_REGEX.match(version_string) if not match: - raise dbt_common.exceptions.base.SemverError(f'"{version_string}" is not a valid semantic version.') + raise dbt_common.exceptions.base.SemverError( + f'"{version_string}" is not a valid semantic version.' + ) matched = {k: v for k, v in match.groupdict().items() if v is not None} @@ -154,15 +158,22 @@ def compare(self, other): return -1 # else is equal and will fall through - equal = (self.matcher == Matchers.GREATER_THAN_OR_EQUAL and other.matcher == Matchers.LESS_THAN_OR_EQUAL) or ( - self.matcher == Matchers.LESS_THAN_OR_EQUAL and other.matcher == Matchers.GREATER_THAN_OR_EQUAL + equal = ( + self.matcher == Matchers.GREATER_THAN_OR_EQUAL + and other.matcher == Matchers.LESS_THAN_OR_EQUAL + ) or ( + self.matcher == Matchers.LESS_THAN_OR_EQUAL + and other.matcher == Matchers.GREATER_THAN_OR_EQUAL ) if equal: return 0 lt = ( (self.matcher == Matchers.LESS_THAN and other.matcher == Matchers.LESS_THAN_OR_EQUAL) - or (other.matcher == Matchers.GREATER_THAN and self.matcher == Matchers.GREATER_THAN_OR_EQUAL) + or ( + other.matcher == Matchers.GREATER_THAN + and self.matcher == Matchers.GREATER_THAN_OR_EQUAL + ) or (self.is_upper_bound and other.is_lower_bound) ) if lt: @@ -170,7 +181,10 @@ def compare(self, other): gt = ( (other.matcher == Matchers.LESS_THAN and self.matcher == Matchers.LESS_THAN_OR_EQUAL) - or (self.matcher == Matchers.GREATER_THAN and other.matcher == Matchers.GREATER_THAN_OR_EQUAL) + or ( + self.matcher == Matchers.GREATER_THAN + and other.matcher == Matchers.GREATER_THAN_OR_EQUAL + ) or (self.is_lower_bound and other.is_upper_bound) ) if gt: @@ -340,7 +354,9 @@ def to_version_string_pair(self): class UnboundedVersionSpecifier(VersionSpecifier): def __init__(self, *args, **kwargs) -> None: - super().__init__(matcher=Matchers.EXACT, major=None, minor=None, patch=None, prerelease=None, build=None) + super().__init__( + matcher=Matchers.EXACT, major=None, minor=None, patch=None, prerelease=None, build=None + ) def __str__(self): return "*" @@ -451,5 +467,7 @@ def filter_installable(versions: List[str], install_prerelease: bool) -> List[st installable.append(version) installable_dict[str(version)] = version_string sorted_installable = sorted(installable) - sorted_installable_original_versions = [str(installable_dict.get(str(version))) for version in sorted_installable] + sorted_installable_original_versions = [ + str(installable_dict.get(str(version))) for version in sorted_installable + ] return sorted_installable_original_versions diff --git a/dbt_common/ui.py b/dbt_common/ui.py index 2cc7c5ef..291d5f06 100644 --- a/dbt_common/ui.py +++ b/dbt_common/ui.py @@ -58,7 +58,8 @@ def red(text: str) -> str: def line_wrap_message(msg: str, subtract: int = 0, dedent: bool = True, prefix: str = "") -> str: - """ + """Line wrap a message to a given printer width. + Line wrap the given message to PRINTER_WIDTH - {subtract}. 
    Convert double newlines to newlines and avoid calling textwrap.fill() on them (like markdown)
diff --git a/dbt_common/utils/connection.py b/dbt_common/utils/connection.py
index 890c3e99..5c76fe7f 100644
--- a/dbt_common/utils/connection.py
+++ b/dbt_common/utils/connection.py
@@ -8,7 +8,9 @@


 def connection_exception_retry(fn, max_attempts: int, attempt: int = 0):
-    """Attempts to run a function that makes an external call, if the call fails
+    """Handle connection retries gracefully.
+
+    Attempts to run a function that makes an external call. If the call fails
     on a Requests exception or decompression issue (ReadError), it will be
     tried up to 5 more times. All exceptions that Requests explicitly raises
     inherit from requests.exceptions.RequestException.
     See https://github.com/dbt-labs/dbt-core/issues/4579
diff --git a/dbt_common/utils/dict.py b/dbt_common/utils/dict.py
index ff97d185..ddb1246d 100644
--- a/dbt_common/utils/dict.py
+++ b/dbt_common/utils/dict.py
@@ -37,11 +37,15 @@ def _merge(a, b):
     return to_return


-# http://stackoverflow.com/questions/20656135/python-deep-merge-dictionary-data
 def deep_merge(*args):
-    """
-    >>> dbt_common.utils.deep_merge({'a': 1, 'b': 2, 'c': 3}, {'a': 2}, {'a': 3, 'b': 1})  # noqa
+    """Deep merge dictionaries.
+
+    Example:
+    >>> dbt_common.utils.deep_merge(
+    ...     {"a": 1, "b": 2, "c": 3}, {"a": 2}, {"a": 3, "b": 1}
+    ... )  # noqa
     {'a': 3, 'b': 1, 'c': 3}
+
+    From: http://stackoverflow.com/questions/20656135/python-deep-merge-dictionary-data
     """
     if len(args) == 0:
         return None
@@ -93,14 +97,17 @@ def _deep_map_render(
     else:
         container_types: Tuple[Type[Any], ...] = (list, dict)
         ok_types = container_types + atomic_types
-        raise DbtConfigError("in _deep_map_render, expected one of {!r}, got {!r}".format(ok_types, type(value)))
+        raise DbtConfigError(
+            "in _deep_map_render, expected one of {!r}, got {!r}".format(ok_types, type(value))
+        )

     return ret


 def deep_map_render(func: Callable[[Any, Tuple[Union[str, int], ...]], Any], value: Any) -> Any:
-    """This function renders a nested dictionary derived from a yaml
-    file. It is used to render dbt_project.yml, profiles.yml, and
+    """This function renders a nested dictionary derived from a yaml file.
+
+    It is used to render dbt_project.yml, profiles.yml, and
     schema files.

     It maps the function func() onto each non-container value in 'value'
diff --git a/dbt_common/utils/encoding.py b/dbt_common/utils/encoding.py
index c741e52f..96acb9c8 100644
--- a/dbt_common/utils/encoding.py
+++ b/dbt_common/utils/encoding.py
@@ -9,7 +9,7 @@
 DECIMALS: Tuple[Type[Any], ...]

 try:
-    import cdecimal  # typing: ignore
+    import cdecimal  # type: ignore
 except ImportError:
     DECIMALS = (decimal.Decimal,)
 else:
@@ -24,7 +24,9 @@ def md5(string, charset="utf-8"):


 class JSONEncoder(json.JSONEncoder):
-    """A 'custom' json encoder that does normal json encoder things, but also
+    """A 'custom' json encoder.
+
+    It does normal json encoder things, but also
     handles `Decimal`s and `Undefined`s. Decimals can lose precision because
     they get converted to floats.
    Undefined's are serialized to an empty string
    """
diff --git a/dbt_common/utils/executor.py b/dbt_common/utils/executor.py
index afe5d6da..819a0e3a 100644
--- a/dbt_common/utils/executor.py
+++ b/dbt_common/utils/executor.py
@@ -20,9 +20,13 @@ def submit(*args, **kwargs):
         if len(args) >= 2:
             self, fn, *args = args
         elif not args:
-            raise TypeError("descriptor 'submit' of 'SingleThreadedExecutor' object needs an argument")
+            raise TypeError(
+                "descriptor 'submit' of 'SingleThreadedExecutor' object needs an argument"
+            )
         else:
-            raise TypeError("submit expected at least 1 positional argument, got %d" % (len(args) - 1))
+            raise TypeError(
+                "submit expected at least 1 positional argument, got %d" % (len(args) - 1)
+            )
         fut = concurrent.futures.Future()
         try:
             result = fn(*args, **kwargs)
diff --git a/pyproject.toml b/pyproject.toml
index d1d7e3ab..d377572e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,8 +9,15 @@ keywords = []
 authors = [
     { name = "dbt Labs", email = "info@dbtlabs.com" },
 ]
+maintainers = [
+    { name = "dbt Labs", email = "info@dbtlabs.com" },
+]
 classifiers = [
-    "Development Status :: 4 - Beta",
+    "Development Status :: 2 - Pre-Alpha",
+    "License :: OSI Approved :: Apache Software License",
+    "Operating System :: MacOS :: MacOS X",
+    "Operating System :: Microsoft :: Windows",
+    "Operating System :: POSIX :: Linux",
     "Programming Language :: Python",
     "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
@@ -26,16 +33,117 @@ dependencies = [
     "jsonschema~=4.0",
     "Jinja2~=3.0",
     "mashumaro[msgpack]~=3.9",
-    "pathspec>=0.9,<0.12",  # TODO: I'm not sure this is needed. check search.py?
+    "pathspec>=0.9,<0.12",
     "protobuf>=4.0.0",
     "python-dateutil~=2.0",
     "requests<3.0.0",
     "typing-extensions~=4.4",
 ]

+[project.optional-dependencies]
+lint = [
+    "black~=23.3",
+    "flake8",
+    "flake8-pyproject",
+    "flake8-docstrings",
+    "mypy~=1.3",
+    "pytest~=7.3",  # needed for linting tests
+    "types-Jinja2~=2.11",
+    "types-jsonschema~=4.17",
+    "types-protobuf~=4.24.0",
+    "types-python-dateutil~=2.8",
+    "types-PyYAML~=6.0",
+    "types-requests"
+]
+test = [
+    "pytest~=7.3",
+    "pytest-xdist~=3.2",
+    "hypothesis~=6.87"
+]
+build = [
+    "wheel",
+    "twine",
+    "check-wheel-contents",
+]
+
+[project.urls]
+Homepage = "https://github.com/dbt-labs/dbt-common"
+Repository = "https://github.com/dbt-labs/dbt-common.git"
+Issues = "https://github.com/dbt-labs/dbt-common/issues"
+Changelog = "https://github.com/dbt-labs/dbt-common/blob/main/CHANGELOG.md"
+
 [tool.hatch.version]
 path = "dbt_common/__about__.py"

+### Default env & scripts
+
+[tool.hatch.envs.default]
+description = "Default environment with dependencies for running dbt-common"
+features = ["lint", "test"]
+
+[tool.hatch.envs.default.scripts]
+proto = "protoc -I=./dbt_common/events --python_out=./dbt_common/events ./dbt_common/events/types.proto"
+
+### Test settings, envs & scripts
+
+[tool.hatch.envs.test]
+description = "Env for running development commands for testing"
+features = ["test"]
+
+[tool.hatch.envs.test.scripts]
+unit = "python -m pytest {args:tests/unit}"
+
+### Linting settings, envs & scripts
+
+[tool.hatch.envs.lint]
+type = "virtual"
+description = "Env for running development commands for linting"
+features = ["lint"]
+
+[tool.hatch.envs.lint.scripts]
+all = [
+    "- black",
+    "- flake8",
+    "- mypy",
+]
+black = "python -m black ."
+flake8 = "python -m flake8 ."
+mypy = "python -m mypy ."
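
As a rough illustration of how the environments above are meant to be driven: each script resolves as `hatch run <env>:<script>`. The sketch below wraps that in Python only for demonstration; the env and script names come from the tables above, and the subprocess wrapper itself is not something this change adds.

import subprocess

# "lint:all" and "test:unit" are <env>:<script> pairs declared under
# [tool.hatch.envs.*.scripts] above.
for target in ("lint:all", "test:unit"):
    # check=True raises CalledProcessError on a non-zero exit code,
    # which is what lets CI fail when pytest fails.
    subprocess.run(["hatch", "run", target], check=True)
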
+
+[tool.black]
+extend-exclude = "dbt_common/events/types_pb2.py"
+line-length = 99
+target-version = ['py38']
+
+[tool.flake8]
+max-line-length = 99
+select = ["E", "W", "F"]
+ignore = ["E203", "E501", "E741", "W503", "W504"]
+exclude = [
+    "dbt_common/events/types_pb2.py",
+    "venv",
+    "env*"
+]
+per-file-ignores = ["*/__init__.py: F401"]
+docstring-convention = "google"
+
+[tool.mypy]
+mypy_path = "third-party-stubs/"
+namespace_packages = true
+warn_unused_configs = true
+show_error_codes = true
+disable_error_code = "attr-defined"  # TODO: revisit once other mypy errors resolved
+disallow_untyped_defs = false  # TODO: add type annotations everywhere
+warn_redundant_casts = true
+exclude = [
+    "dbt_common/events/types_pb2.py",
+    "env*",
+    "third-party-stubs/*",
+]
+
+### Build settings, envs & scripts
+
 [build-system]
 requires = ["hatchling"]
 build-backend = "hatchling.build"
@@ -48,37 +156,14 @@ exclude = [
     ".gitignore",
     ".pre-commit-config.yaml",
     "CONTRIBUTING.md",
-    "MAKEFILE",
     "/tests",
 ]

 [tool.hatch.build.targets.wheel]
 packages = ["dbt_common"]

-[tool.hatch.envs.dev-env.scripts]
-all = ["pre-commit run --all-files"]
-
-[tool.hatch.envs.dev-env]
-description = "Env for running development commands like pytest / pre-commit"
-dependencies = [
-    "pytest~=7.3",
-    "pytest-xdist~=3.2",
-    "httpx~=0.24",
-    "hypothesis~=6.87",
-    "pre-commit~=3.2",
-    "isort~=5.12",
-    "black~=23.3",
-    "ruff==0.0.260",
-    "mypy~=1.3",
-    "pytest~=7.3",
-    "types-Jinja2~=2.11",
-    "types-jsonschema~=4.17",
-    "types-python-dateutil~=2.8",
-    "types-PyYAML~=6.0",
-]
-
 [tool.hatch.envs.build]
-detached = true
+description = "Env for running development commands for building"
 features = ["build"]

 [tool.hatch.envs.build.scripts]
@@ -96,43 +181,3 @@ check-sdist = [
     "find ./dist/dbt_common-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
     "pip freeze | grep dbt-common",
 ]
-protobuf = "protoc -I=./dbt_common/events --python_out=./dbt_common/events ./dbt_common/events/types.proto"
-
-[tool.ruff]
-line-length = 120
-select = [
-    "E",  # Pycodestyle
-    "F",  # Pyflakes
-    "W",  # Whitespace
-    "D",  # Pydocs
-]
-ignore = [
-    # Missing docstring in public module -- often docs handled within classes
-    "D100",
-    # Missing docstring in public package -- often docs handled within files not __init__.py
-    "D104"
-]
-# Let ruff autofix these errors.
-# F401 - Unused imports.
-fixable = ["F401"] - -[tool.ruff.pydocstyle] -convention = "google" - -[tool.mypy] -mypy_path = "third-party-stubs/" -namespace_packages = true -warn_unused_configs = true -disallow_untyped_defs = true -warn_redundant_casts = true - -# Don't run the extensive mypy checks on custom stubs -[[tool.mypy.overrides]] -module = ["logbook.*"] -disallow_untyped_defs = false - -[tool.isort] -profile = "black" - -[tool.black] -line-length = 120 diff --git a/tests/unit/test_agate_helper.py b/tests/unit/test_agate_helper.py index 2e3595a1..4c12bcd8 100644 --- a/tests/unit/test_agate_helper.py +++ b/tests/unit/test_agate_helper.py @@ -131,9 +131,15 @@ def test_merge_allnull(self): self.assertEqual(len(result), 4) def test_merge_mixed(self): - t1 = agate_helper.table_from_rows([(1, "a", None, None), (2, "b", None, None)], ("a", "b", "c", "d")) - t2 = agate_helper.table_from_rows([(3, "c", "dog", 1), (4, "d", "cat", 5)], ("a", "b", "c", "d")) - t3 = agate_helper.table_from_rows([(3, "c", None, 1.5), (4, "d", None, 3.5)], ("a", "b", "c", "d")) + t1 = agate_helper.table_from_rows( + [(1, "a", None, None), (2, "b", None, None)], ("a", "b", "c", "d") + ) + t2 = agate_helper.table_from_rows( + [(3, "c", "dog", 1), (4, "d", "cat", 5)], ("a", "b", "c", "d") + ) + t3 = agate_helper.table_from_rows( + [(3, "c", None, 1.5), (4, "d", None, 3.5)], ("a", "b", "c", "d") + ) result = agate_helper.merge_tables([t1, t2]) self.assertEqual(result.column_names, ("a", "b", "c", "d")) @@ -216,6 +222,5 @@ def test_nocast_bool_01(self): [True, Decimal(1)], [False, Decimal(0)], ] - for i, row in enumerate(tbl): self.assertEqual(list(row), expected[i]) diff --git a/tests/unit/test_core_dbt_utils.py b/tests/unit/test_core_dbt_utils.py index 3a31c60d..8a0e836e 100644 --- a/tests/unit/test_core_dbt_utils.py +++ b/tests/unit/test_core_dbt_utils.py @@ -8,29 +8,29 @@ class TestCommonDbtUtils(unittest.TestCase): def test_connection_exception_retry_none(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add(), 5) + Counter._reset(self) + connection_exception_retry(lambda: Counter._add(self), 5) self.assertEqual(1, counter) def test_connection_exception_retry_success_requests_exception(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add_with_requests_exception(), 5) + Counter._reset(self) + connection_exception_retry(lambda: Counter._add_with_requests_exception(self), 5) self.assertEqual(2, counter) # 2 = original attempt returned None, plus 1 retry def test_connection_exception_retry_max(self): - Counter._reset() + Counter._reset(self) with self.assertRaises(ConnectionError): - connection_exception_retry(lambda: Counter._add_with_exception(), 5) + connection_exception_retry(lambda: Counter._add_with_exception(self), 5) self.assertEqual(6, counter) # 6 = original attempt plus 5 retries def test_connection_exception_retry_success_failed_untar(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add_with_untar_exception(), 5) + Counter._reset(self) + connection_exception_retry(lambda: Counter._add_with_untar_exception(self), 5) self.assertEqual(2, counter) # 2 = original attempt returned ReadError, plus 1 retry def test_connection_exception_retry_success_failed_eofexception(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add_with_eof_exception(), 5) + Counter._reset(self) + connection_exception_retry(lambda: Counter._add_with_eof_exception(self), 5) self.assertEqual(2, counter) # 2 = original attempt returned EOFError, plus 1 retry @@ -38,36 +38,36 @@ 
def test_connection_exception_retry_success_failed_eofexception(self): class Counter: - def _add(): + def _add(self): global counter counter += 1 # All exceptions that Requests explicitly raises inherit from # requests.exceptions.RequestException so we want to make sure that raises plus one exception # that inherit from it for sanity - def _add_with_requests_exception(): + def _add_with_requests_exception(self): global counter counter += 1 if counter < 2: raise requests.exceptions.RequestException - def _add_with_exception(): + def _add_with_exception(self): global counter counter += 1 raise requests.exceptions.ConnectionError - def _add_with_untar_exception(): + def _add_with_untar_exception(self): global counter counter += 1 if counter < 2: raise tarfile.ReadError - def _add_with_eof_exception(): + def _add_with_eof_exception(self): global counter counter += 1 if counter < 2: raise EOFError - def _reset(): + def _reset(self): global counter counter = 0 diff --git a/tests/unit/test_helper_types.py b/tests/unit/test_helper_types.py index f3337478..17ebbd8e 100644 --- a/tests/unit/test_helper_types.py +++ b/tests/unit/test_helper_types.py @@ -34,7 +34,9 @@ def test_init_invalid_error(self): WarnErrorOptions(include=["InvalidError"], valid_error_names=set(["ValidError"])) with pytest.raises(ValidationError): - WarnErrorOptions(include="*", exclude=["InvalidError"], valid_error_names=set(["ValidError"])) + WarnErrorOptions( + include="*", exclude=["InvalidError"], valid_error_names=set(["ValidError"]) + ) def test_init_invalid_error_default_valid_error_names(self): with pytest.raises(ValidationError): @@ -44,7 +46,9 @@ def test_init_invalid_error_default_valid_error_names(self): WarnErrorOptions(include="*", exclude=["InvalidError"]) def test_init_valid_error(self): - warn_error_options = WarnErrorOptions(include=["ValidError"], valid_error_names=set(["ValidError"])) + warn_error_options = WarnErrorOptions( + include=["ValidError"], valid_error_names=set(["ValidError"]) + ) assert warn_error_options.include == ["ValidError"] assert warn_error_options.exclude == [] diff --git a/tests/unit/test_jinja.py b/tests/unit/test_jinja.py index 1fafa4a8..f038a1ec 100644 --- a/tests/unit/test_jinja.py +++ b/tests/unit/test_jinja.py @@ -8,7 +8,9 @@ class TestBlockLexer(unittest.TestCase): def test_basic(self): body = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' block_data = " \n\r\t{%- mytype foo %}" + body + "{%endmytype -%}" - blocks = extract_toplevel_blocks(block_data, allowed_blocks={"mytype"}, collect_raw_data=False) + blocks = extract_toplevel_blocks( + block_data, allowed_blocks={"mytype"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].block_type_name, "mytype") self.assertEqual(blocks[0].block_name, "foo") @@ -17,7 +19,10 @@ def test_basic(self): def test_multiple(self): body_one = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' - body_two = "{{ config(bar=1)}}\r\nselect * from {% if foo %} thing " "{% else %} other_thing {% endif %}" + body_two = ( + "{{ config(bar=1)}}\r\nselect * from {% if foo %} thing " + "{% else %} other_thing {% endif %}" + ) block_data = ( " {% mytype foo %}" @@ -27,14 +32,18 @@ def test_multiple(self): + body_two + "{% endothertype %}" ) - blocks = extract_toplevel_blocks(block_data, allowed_blocks={"mytype", "othertype"}, collect_raw_data=False) + blocks = extract_toplevel_blocks( + block_data, allowed_blocks={"mytype", "othertype"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 2) def 
test_comments(self): body = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' comment = "{# my comment #}" block_data = " \n\r\t{%- mytype foo %}" + body + "{%endmytype -%}" - blocks = extract_toplevel_blocks(comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False) + blocks = extract_toplevel_blocks( + comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].block_type_name, "mytype") self.assertEqual(blocks[0].block_name, "foo") @@ -43,9 +52,14 @@ def test_comments(self): def test_evil_comments(self): body = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' - comment = "{# external comment {% othertype bar %} select * from thing.other_thing{% endothertype %} #}" + comment = ( + "{# external comment {% othertype bar %} select * from " + "thing.other_thing{% endothertype %} #}" + ) block_data = " \n\r\t{%- mytype foo %}" + body + "{%endmytype -%}" - blocks = extract_toplevel_blocks(comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False) + blocks = extract_toplevel_blocks( + comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].block_type_name, "mytype") self.assertEqual(blocks[0].block_name, "foo") @@ -53,10 +67,18 @@ def test_evil_comments(self): self.assertEqual(blocks[0].full_block, block_data) def test_nested_comments(self): - body = '{# my comment #} {{ config(foo="bar") }}\r\nselect * from {# my other comment embedding {% endmytype %} #} this.that\r\n' + body = ( + '{# my comment #} {{ config(foo="bar") }}' + "\r\nselect * from {# my other comment embedding {% endmytype %} #} this.that\r\n" + ) block_data = " \n\r\t{%- mytype foo %}" + body + "{% endmytype -%}" - comment = "{# external comment {% othertype bar %} select * from thing.other_thing{% endothertype %} #}" - blocks = extract_toplevel_blocks(comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False) + comment = ( + "{# external comment {% othertype bar %} select * from " + "thing.other_thing{% endothertype %} #}" + ) + blocks = extract_toplevel_blocks( + comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].block_type_name, "mytype") self.assertEqual(blocks[0].block_name, "foo") @@ -86,9 +108,12 @@ def test_complex_file(self): def test_peaceful_macro_coexistence(self): body = ( - "{# my macro #} {% macro foo(a, b) %} do a thing {%- endmacro %} {# my model #} {% a b %} test {% enda %}" + "{# my macro #} {% macro foo(a, b) %} do a thing " + "{%- endmacro %} {# my model #} {% a b %} test {% enda %}" + ) + blocks = extract_toplevel_blocks( + body, allowed_blocks={"macro", "a"}, collect_raw_data=True ) - blocks = extract_toplevel_blocks(body, allowed_blocks={"macro", "a"}, collect_raw_data=True) self.assertEqual(len(blocks), 4) self.assertEqual(blocks[0].full_block, "{# my macro #} ") self.assertEqual(blocks[1].block_type_name, "macro") @@ -100,8 +125,13 @@ def test_peaceful_macro_coexistence(self): self.assertEqual(blocks[3].contents, " test ") def test_macro_with_trailing_data(self): - body = "{# my macro #} {% macro foo(a, b) %} do a thing {%- endmacro %} {# my model #} {% a b %} test {% enda %} raw data so cool" - blocks = extract_toplevel_blocks(body, allowed_blocks={"macro", "a"}, collect_raw_data=True) + body = ( + "{# my macro #} {% macro foo(a, b) %} do a thing {%- endmacro %} " + "{# my model #} {% a b %} test {% enda %} 
raw data so cool" + ) + blocks = extract_toplevel_blocks( + body, allowed_blocks={"macro", "a"}, collect_raw_data=True + ) self.assertEqual(len(blocks), 5) self.assertEqual(blocks[0].full_block, "{# my macro #} ") self.assertEqual(blocks[1].block_type_name, "macro") @@ -114,23 +144,33 @@ def test_macro_with_trailing_data(self): self.assertEqual(blocks[4].full_block, " raw data so cool") def test_macro_with_crazy_args(self): - body = """{% macro foo(a, b=asdf("cool this is 'embedded'" * 3) + external_var, c)%}cool{# block comment with {% endmacro %} in it #} stuff here {% endmacro %}""" + body = ( + """{% macro foo(a, b=asdf("cool this is 'embedded'" * 3) + external_var, c)%}""" + "cool{# block comment with {% endmacro %} in it #} stuff here " + "{% endmacro %}" + ) blocks = extract_toplevel_blocks(body, allowed_blocks={"macro"}, collect_raw_data=False) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].block_type_name, "macro") self.assertEqual(blocks[0].block_name, "foo") - self.assertEqual(blocks[0].contents, "cool{# block comment with {% endmacro %} in it #} stuff here ") + self.assertEqual( + blocks[0].contents, "cool{# block comment with {% endmacro %} in it #} stuff here " + ) def test_materialization_parse(self): body = "{% materialization xxx, default %} ... {% endmaterialization %}" - blocks = extract_toplevel_blocks(body, allowed_blocks={"materialization"}, collect_raw_data=False) + blocks = extract_toplevel_blocks( + body, allowed_blocks={"materialization"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].block_type_name, "materialization") self.assertEqual(blocks[0].block_name, "xxx") self.assertEqual(blocks[0].full_block, body) body = '{% materialization xxx, adapter="other" %} ... {% endmaterialization %}' - blocks = extract_toplevel_blocks(body, allowed_blocks={"materialization"}, collect_raw_data=False) + blocks = extract_toplevel_blocks( + body, allowed_blocks={"materialization"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].block_type_name, "materialization") self.assertEqual(blocks[0].block_name, "xxx") @@ -193,8 +233,13 @@ def test_set_block(self): self.assertEqual(blocks[0].full_block, "{% myblock foo %}hi{% endmyblock %}") def test_crazy_set_statement(self): - body = '{% set x = (thing("{% myblock foo %}")) %}{% otherblock bar %}x{% endotherblock %}{% set y = otherthing("{% myblock foo %}") %}' - blocks = extract_toplevel_blocks(body, allowed_blocks={"otherblock"}, collect_raw_data=False) + body = ( + '{% set x = (thing("{% myblock foo %}")) %}{% otherblock bar %}x{% endotherblock %}' + '{% set y = otherthing("{% myblock foo %}") %}' + ) + blocks = extract_toplevel_blocks( + body, allowed_blocks={"otherblock"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].full_block, "{% otherblock bar %}x{% endotherblock %}") self.assertEqual(blocks[0].block_type_name, "otherblock") @@ -213,15 +258,22 @@ def test_deceptive_do_statement(self): def test_do_block(self): body = "{% do %}thing.update(){% enddo %}{% myblock foo %}hi{% endmyblock %}" - blocks = extract_toplevel_blocks(body, allowed_blocks={"do", "myblock"}, collect_raw_data=False) + blocks = extract_toplevel_blocks( + body, allowed_blocks={"do", "myblock"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 2) self.assertEqual(blocks[0].contents, "thing.update()") self.assertEqual(blocks[0].block_type_name, "do") self.assertEqual(blocks[1].full_block, "{% myblock foo %}hi{% endmyblock 
%}") def test_crazy_do_statement(self): - body = '{% do (thing("{% myblock foo %}")) %}{% otherblock bar %}x{% endotherblock %}{% do otherthing("{% myblock foo %}") %}{% myblock x %}hi{% endmyblock %}' - blocks = extract_toplevel_blocks(body, allowed_blocks={"myblock", "otherblock"}, collect_raw_data=False) + body = ( + '{% do (thing("{% myblock foo %}")) %}{% otherblock bar %}x{% endotherblock %}' + '{% do otherthing("{% myblock foo %}") %}{% myblock x %}hi{% endmyblock %}' + ) + blocks = extract_toplevel_blocks( + body, allowed_blocks={"myblock", "otherblock"}, collect_raw_data=False + ) self.assertEqual(len(blocks), 2) self.assertEqual(blocks[0].full_block, "{% otherblock bar %}x{% endotherblock %}") self.assertEqual(blocks[0].block_type_name, "otherblock") @@ -260,7 +312,10 @@ def test_quoted_endblock_within_block(self): self.assertEqual(blocks[0].contents, '{% set x = ("{% endmyblock %}") %} ') def test_docs_block(self): - body = '{% docs __my_doc__ %} asdf {# nope {% enddocs %}} #} {% enddocs %} {% docs __my_other_doc__ %} asdf "{% enddocs %}' + body = ( + "{% docs __my_doc__ %} asdf {# nope {% enddocs %}} #} {% enddocs %}" + '{% docs __my_other_doc__ %} asdf "{% enddocs %}' + ) blocks = extract_toplevel_blocks(body, allowed_blocks={"docs"}, collect_raw_data=False) self.assertEqual(len(blocks), 2) self.assertEqual(blocks[0].block_type_name, "docs") @@ -307,7 +362,10 @@ def test_for(self): def test_for_innocuous(self): # no for-loops over macros. - body = "{% for x in range(10) %}{% something my_something %} adsf {% endsomething %}{% endfor %}" + body = ( + "{% for x in range(10) %}{% something my_something %} adsf " + "{% endsomething %}{% endfor %}" + ) blocks = extract_toplevel_blocks(body) self.assertEqual(len(blocks), 1) self.assertEqual(blocks[0].full_block, body) @@ -317,7 +375,10 @@ def test_endif(self): with self.assertRaises(CompilationError) as err: extract_toplevel_blocks(body) self.assertIn( - "Got an unexpected control flow end tag, got endif but never saw a preceeding if (@ 1:53)", + ( + "Got an unexpected control flow end tag, got endif but " + "never saw a preceeding if (@ 1:53)" + ), str(err.exception), ) diff --git a/tests/unit/test_system_client.py b/tests/unit/test_system_client.py index 198802d6..a4dcc323 100644 --- a/tests/unit/test_system_client.py +++ b/tests/unit/test_system_client.py @@ -28,7 +28,7 @@ def get_profile_text(self): def tearDown(self): try: shutil.rmtree(self.tmp_dir) - except Exception as e: # noqa: [F841] + except Exception as e: # noqa: F841 pass def test__make_file_when_exists(self): @@ -46,7 +46,9 @@ def test__make_file_when_not_exists(self): def test__make_file_with_overwrite(self): self.set_up_profile() - written = dbt_common.clients.system.make_file(self.profiles_path, contents="NEW_TEXT", overwrite=True) + written = dbt_common.clients.system.make_file( + self.profiles_path, contents="NEW_TEXT", overwrite=True + ) self.assertTrue(written) self.assertEqual(self.get_profile_text(), "NEW_TEXT") @@ -81,7 +83,7 @@ def setUp(self): self.exists_cmd = ["echo", "hello"] os.mkdir(self.run_dir) - with open(self.empty_file, "w") as fp: # noqa: [F841] + with open(self.empty_file, "w") as fp: # noqa: F841 pass # "touch" def tearDown(self): @@ -199,14 +201,16 @@ def test_ignore_spec(self): self.tempdir, [""], "*.sql", - pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, "sql-files*".splitlines()), + pathspec.PathSpec.from_lines( + pathspec.patterns.GitWildMatchPattern, "sql-files*".splitlines() + ), ) self.assertEqual(out, []) def 
tearDown(self): try: shutil.rmtree(self.base_dir) - except Exception as e: # noqa: [F841] + except Exception as e: # noqa: F841 pass @@ -219,7 +223,7 @@ def setUp(self): def tearDown(self): try: shutil.rmtree(self.base_dir) - except Exception as e: # noqa: [F841] + except Exception as e: # noqa: F841 pass def test_untar_package_success(self): @@ -242,7 +246,9 @@ def test_untar_package_success(self): def test_untar_package_failure(self): # create a text file then rename it as a tar (so it's invalid) - with NamedTemporaryFile(prefix="a", suffix=".txt", dir=self.tempdir, delete=False) as file_a: + with NamedTemporaryFile( + prefix="a", suffix=".txt", dir=self.tempdir, delete=False + ) as file_a: file_a.write(b"some text in the text file") txt_file_name = file_a.name file_path = os.path.dirname(txt_file_name) @@ -250,13 +256,14 @@ def test_untar_package_failure(self): os.rename(txt_file_name, tar_file_path) # now that we're set up, test that untarring the file fails - with self.assertRaises(tarfile.ReadError) as exc: # noqa: [F841] + with self.assertRaises(tarfile.ReadError) as exc: # noqa: F841 dbt_common.clients.system.untar_package(tar_file_path, self.tempdest) def test_untar_package_empty(self): # create a tarball with nothing in it - with NamedTemporaryFile(prefix="my-empty-package.2", suffix=".tar.gz", dir=self.tempdir) as named_file: - + with NamedTemporaryFile( + prefix="my-empty-package.2", suffix=".tar.gz", dir=self.tempdir + ) as named_file: # make sure we throw an error for the empty file with self.assertRaises(tarfile.ReadError) as exc: dbt_common.clients.system.untar_package(named_file.name, self.tempdest) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 0b417052..250c20cc 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -1,7 +1,7 @@ import unittest import dbt_common.exceptions -import dbt_common.utils +import dbt_common.utils.dict class TestDeepMerge(unittest.TestCase): @@ -16,11 +16,13 @@ def test__simple_cases(self): ] for case in cases: - actual = dbt_common.utils.deep_merge(*case["args"]) + actual = dbt_common.utils.dict.deep_merge(*case["args"]) self.assertEqual( case["expected"], actual, - "failed on {} (actual {}, expected {})".format(case["description"], actual, case["expected"]), + "failed on {} (actual {}, expected {})".format( + case["description"], actual, case["expected"] + ), ) @@ -36,11 +38,13 @@ def test__simple_cases(self): ] for case in cases: - actual = dbt_common.utils.deep_merge(*case["args"]) + actual = dbt_common.utils.dict.deep_merge(*case["args"]) self.assertEqual( case["expected"], actual, - "failed on {} (actual {}, expected {})".format(case["description"], actual, case["expected"]), + "failed on {} (actual {}, expected {})".format( + case["description"], actual, case["expected"] + ), ) @@ -87,15 +91,14 @@ def test__simple_cases(self): }, ], } - actual = dbt_common.utils.deep_map_render(self.intify_all, self.input_value) + actual = dbt_common.utils.dict.deep_map_render(self.intify_all, self.input_value) self.assertEqual(actual, expected) - actual = dbt_common.utils.deep_map_render(self.intify_all, expected) + actual = dbt_common.utils.dict.deep_map_render(self.intify_all, expected) self.assertEqual(actual, expected) @staticmethod def special_keypath(value, keypath): - if tuple(keypath) == ("foo", "baz", 1): return "hello" else: @@ -119,21 +122,21 @@ def test__keypath(self): }, ], } - actual = dbt_common.utils.deep_map_render(self.special_keypath, self.input_value) + actual = 
dbt_common.utils.dict.deep_map_render(self.special_keypath, self.input_value) self.assertEqual(actual, expected) - actual = dbt_common.utils.deep_map_render(self.special_keypath, expected) + actual = dbt_common.utils.dict.deep_map_render(self.special_keypath, expected) self.assertEqual(actual, expected) def test__noop(self): - actual = dbt_common.utils.deep_map_render(lambda x, _: x, self.input_value) + actual = dbt_common.utils.dict.deep_map_render(lambda x, _: x, self.input_value) self.assertEqual(actual, self.input_value) def test_trivial(self): cases = [[], {}, 1, "abc", None, True] for case in cases: - result = dbt_common.utils.deep_map_render(lambda x, _: x, case) + result = dbt_common.utils.dict.deep_map_render(lambda x, _: x, case) self.assertEqual(result, case) with self.assertRaises(dbt_common.exceptions.DbtConfigError): - dbt_common.utils.deep_map_render(lambda x, _: x, {"foo": object()}) + dbt_common.utils.dict.deep_map_render(lambda x, _: x, {"foo": object()}) diff --git a/third-party-stubs/agate/__init__.pyi b/third-party-stubs/agate/__init__.pyi new file mode 100644 index 00000000..c773cc7d --- /dev/null +++ b/third-party-stubs/agate/__init__.pyi @@ -0,0 +1,89 @@ +from collections.abc import Sequence + +from typing import Any, Optional, Callable, Iterable, Dict, Union + +from . import data_types as data_types +from .data_types import ( + Text as Text, + Number as Number, + Boolean as Boolean, + DateTime as DateTime, + Date as Date, + TimeDelta as TimeDelta, +) + +class MappedSequence(Sequence): + def __init__(self, values: Any, keys: Optional[Any] = ...) -> None: ... + def __unicode__(self): ... + def __getitem__(self, key: Any): ... + def __setitem__(self, key: Any, value: Any) -> None: ... + def __iter__(self): ... + def __len__(self): ... + def __eq__(self, other: Any): ... + def __ne__(self, other: Any): ... + def __contains__(self, value: Any): ... + def keys(self): ... + def values(self): ... + def items(self): ... + def get(self, key: Any, default: Optional[Any] = ...): ... + def dict(self): ... + +class Row(MappedSequence): ... + +class Table: + def __init__( + self, + rows: Any, + column_names: Optional[Any] = ..., + column_types: Optional[Any] = ..., + row_names: Optional[Any] = ..., + _is_fork: bool = ..., + ) -> None: ... + def __len__(self): ... + def __iter__(self): ... + def __getitem__(self, key: Any): ... + @property + def column_types(self): ... + @property + def column_names(self): ... + @property + def row_names(self): ... + @property + def columns(self): ... + @property + def rows(self): ... + def print_csv(self, **kwargs: Any) -> None: ... + def print_json(self, **kwargs: Any) -> None: ... + def where(self, test: Callable[[Row], bool]) -> "Table": ... + def select(self, key: Union[Iterable[str], str]) -> "Table": ... + # these definitions are much narrower than what's actually accepted + @classmethod + def from_object( + cls, obj: Iterable[Dict[str, Any]], *, column_types: Optional["TypeTester"] = None + ) -> "Table": ... + @classmethod + def from_csv( + cls, path: Iterable[str], *, column_types: Optional["TypeTester"] = None + ) -> "Table": ... + @classmethod + def merge(cls, tables: Iterable["Table"]) -> "Table": ... + def rename( + self, + column_names: Optional[Iterable[str]] = None, + row_names: Optional[Any] = None, + slug_columns: bool = False, + slug_rows: bool = False, + **kwargs: Any, + ) -> "Table": ... + +class TypeTester: + def __init__( + self, force: Any = ..., limit: Optional[Any] = ..., types: Optional[Any] = ... + ) -> None: ... 
+ def run(self, rows: Any, column_names: Any): ... + +class MaxPrecision: + def __init__(self, column_name: Any) -> None: ... + +# this is not strictly true, but it's all we care about. +def aggregate(self, aggregations: MaxPrecision) -> int: ... diff --git a/third-party-stubs/agate/data_types.pyi b/third-party-stubs/agate/data_types.pyi new file mode 100644 index 00000000..8114f7b5 --- /dev/null +++ b/third-party-stubs/agate/data_types.pyi @@ -0,0 +1,71 @@ +from typing import Any, Optional + +DEFAULT_NULL_VALUES: Any + +class DataType: + null_values: Any = ... + def __init__(self, null_values: Any = ...) -> None: ... + def test(self, d: Any): ... + def cast(self, d: Any) -> None: ... + def csvify(self, d: Any): ... + def jsonify(self, d: Any): ... + +DEFAULT_TRUE_VALUES: Any +DEFAULT_FALSE_VALUES: Any + +class Boolean(DataType): + true_values: Any = ... + false_values: Any = ... + def __init__( + self, true_values: Any = ..., false_values: Any = ..., null_values: Any = ... + ) -> None: ... + def cast(self, d: Any): ... + def jsonify(self, d: Any): ... + +ZERO_DT: Any + +class Date(DataType): + date_format: Any = ... + parser: Any = ... + def __init__(self, date_format: Optional[Any] = ..., **kwargs: Any) -> None: ... + def cast(self, d: Any): ... + def csvify(self, d: Any): ... + def jsonify(self, d: Any): ... + +class DateTime(DataType): + datetime_format: Any = ... + timezone: Any = ... + def __init__( + self, datetime_format: Optional[Any] = ..., timezone: Optional[Any] = ..., **kwargs: Any + ) -> None: ... + def cast(self, d: Any): ... + def csvify(self, d: Any): ... + def jsonify(self, d: Any): ... + +DEFAULT_CURRENCY_SYMBOLS: Any +POSITIVE: Any +NEGATIVE: Any + +class Number(DataType): + locale: Any = ... + currency_symbols: Any = ... + group_symbol: Any = ... + decimal_symbol: Any = ... + def __init__( + self, + locale: str = ..., + group_symbol: Optional[Any] = ..., + decimal_symbol: Optional[Any] = ..., + currency_symbols: Any = ..., + **kwargs: Any, + ) -> None: ... + def cast(self, d: Any): ... + def jsonify(self, d: Any): ... + +class TimeDelta(DataType): + def cast(self, d: Any): ... + +class Text(DataType): + cast_nulls: Any = ... + def __init__(self, cast_nulls: bool = ..., **kwargs: Any) -> None: ... + def cast(self, d: Any): ... 
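
These agate stubs describe only the slice of the library that dbt-common actually exercises. A minimal hedged sketch of that slice, with made-up rows and column names, might look like:

import agate

# Constructor signature as declared in the Table stub above.
table = agate.Table(
    rows=[(1, "a"), (2, "b")],
    column_names=["id", "name"],
    column_types=[agate.Number(), agate.Text()],
)

# where() filters rows and select() projects columns -- the two
# operations the stub exposes alongside the merge/rename classmethods.
big_ids = table.where(lambda row: row["id"] > 1)
names = table.select("name")
print(len(big_ids), names.column_names)
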
diff --git a/third-party-stubs/mashumaro/__init__.pyi b/third-party-stubs/mashumaro/__init__.pyi new file mode 100644 index 00000000..0a67966c --- /dev/null +++ b/third-party-stubs/mashumaro/__init__.pyi @@ -0,0 +1,3 @@ +from mashumaro.exceptions import MissingField as MissingField +from mashumaro.helper import field_options as field_options, pass_through as pass_through +from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin diff --git a/third-party-stubs/mashumaro/config.pyi b/third-party-stubs/mashumaro/config.pyi new file mode 100644 index 00000000..7b30709d --- /dev/null +++ b/third-party-stubs/mashumaro/config.pyi @@ -0,0 +1,26 @@ +from mashumaro.core.const import Sentinel +from mashumaro.dialect import Dialect +from mashumaro.types import Discriminator, SerializationStrategy +from typing import Any, Callable, Dict, List, Optional, Type, Union +from typing_extensions import Literal + +TO_DICT_ADD_BY_ALIAS_FLAG: str +TO_DICT_ADD_OMIT_NONE_FLAG: str +ADD_DIALECT_SUPPORT: str +ADD_SERIALIZATION_CONTEXT: str +SerializationStrategyValueType = Union[SerializationStrategy, Dict[str, Union[str, Callable]]] + +class BaseConfig: + debug: bool + code_generation_options: List[str] + serialization_strategy: Dict[Any, SerializationStrategyValueType] + aliases: Dict[str, str] + serialize_by_alias: bool + namedtuple_as_dict: bool + allow_postponed_evaluation: bool + dialect: Optional[Type[Dialect]] + omit_none: Union[bool, Sentinel.MISSING] + orjson_options: Optional[int] + json_schema: Dict[str, Any] + discriminator: Optional[Discriminator] + lazy_compilation: bool diff --git a/third-party-stubs/mashumaro/core/__init__.pyi b/third-party-stubs/mashumaro/core/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/third-party-stubs/mashumaro/core/const.pyi b/third-party-stubs/mashumaro/core/const.pyi new file mode 100644 index 00000000..dfcd1358 --- /dev/null +++ b/third-party-stubs/mashumaro/core/const.pyi @@ -0,0 +1,17 @@ +import enum +from _typeshed import Incomplete + +PY_37: Incomplete +PY_38: Incomplete +PY_39: Incomplete +PY_310: Incomplete +PY_311_MIN: Incomplete +PY_310_MIN: Incomplete +PY_39_MIN: Incomplete +PY_38_MIN: Incomplete +PY_37_MIN: Incomplete +PEP_585_COMPATIBLE = PY_39_MIN +PEP_586_COMPATIBLE = PY_38_MIN + +class Sentinel(enum.Enum): + MISSING: Incomplete diff --git a/third-party-stubs/mashumaro/core/helpers.pyi b/third-party-stubs/mashumaro/core/helpers.pyi new file mode 100644 index 00000000..3470d416 --- /dev/null +++ b/third-party-stubs/mashumaro/core/helpers.pyi @@ -0,0 +1,10 @@ +import datetime +from _typeshed import Incomplete + +UTC_OFFSET_PATTERN: str + +def parse_timezone(s: str) -> datetime.timezone: ... + +class ConfigValue: + name: Incomplete + def __init__(self, name: str) -> None: ... 
diff --git a/third-party-stubs/mashumaro/core/meta/__init__.pyi b/third-party-stubs/mashumaro/core/meta/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/third-party-stubs/mashumaro/core/meta/code/__init__.pyi b/third-party-stubs/mashumaro/core/meta/code/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/third-party-stubs/mashumaro/core/meta/code/builder.pyi b/third-party-stubs/mashumaro/core/meta/code/builder.pyi new file mode 100644 index 00000000..9d575b79 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/code/builder.pyi @@ -0,0 +1,146 @@ +import types +import typing +from _typeshed import Incomplete +from collections.abc import Generator +from dataclasses import Field +from mashumaro.config import ( + ADD_DIALECT_SUPPORT as ADD_DIALECT_SUPPORT, + ADD_SERIALIZATION_CONTEXT as ADD_SERIALIZATION_CONTEXT, + BaseConfig as BaseConfig, + SerializationStrategyValueType as SerializationStrategyValueType, + TO_DICT_ADD_BY_ALIAS_FLAG as TO_DICT_ADD_BY_ALIAS_FLAG, + TO_DICT_ADD_OMIT_NONE_FLAG as TO_DICT_ADD_OMIT_NONE_FLAG, +) +from mashumaro.core.const import Sentinel as Sentinel +from mashumaro.core.helpers import ConfigValue as ConfigValue +from mashumaro.core.meta.code.lines import CodeLines as CodeLines +from mashumaro.core.meta.helpers import ( + get_args as get_args, + get_class_that_defines_field as get_class_that_defines_field, + get_class_that_defines_method as get_class_that_defines_method, + get_literal_values as get_literal_values, + get_name_error_name as get_name_error_name, + hash_type_args as hash_type_args, + is_class_var as is_class_var, + is_dataclass_dict_mixin as is_dataclass_dict_mixin, + is_dialect_subclass as is_dialect_subclass, + is_init_var as is_init_var, + is_literal as is_literal, + is_optional as is_optional, + is_type_var_any as is_type_var_any, + resolve_type_params as resolve_type_params, + substitute_type_params as substitute_type_params, + type_name as type_name, +) +from mashumaro.core.meta.types.common import FieldContext as FieldContext, ValueSpec as ValueSpec +from mashumaro.core.meta.types.pack import PackerRegistry as PackerRegistry +from mashumaro.core.meta.types.unpack import ( + SubtypeUnpackerBuilder as SubtypeUnpackerBuilder, + UnpackerRegistry as UnpackerRegistry, +) +from mashumaro.dialect import Dialect as Dialect +from mashumaro.exceptions import ( + BadDialect as BadDialect, + BadHookSignature as BadHookSignature, + InvalidFieldValue as InvalidFieldValue, + MissingDiscriminatorError as MissingDiscriminatorError, + MissingField as MissingField, + SuitableVariantNotFoundError as SuitableVariantNotFoundError, + ThirdPartyModuleNotFoundError as ThirdPartyModuleNotFoundError, + UnresolvedTypeReferenceError as UnresolvedTypeReferenceError, + UnserializableDataError as UnserializableDataError, + UnserializableField as UnserializableField, + UnsupportedDeserializationEngine as UnsupportedDeserializationEngine, + UnsupportedSerializationEngine as UnsupportedSerializationEngine, +) +from mashumaro.types import Discriminator as Discriminator + +__PRE_SERIALIZE__: str +__PRE_DESERIALIZE__: str +__POST_SERIALIZE__: str +__POST_DESERIALIZE__: str + +class CodeBuilder: + cls: Incomplete + lines: Incomplete + globals: Incomplete + resolved_type_params: Incomplete + field_classes: Incomplete + initial_type_args: Incomplete + dialect: Incomplete + default_dialect: Incomplete + allow_postponed_evaluation: Incomplete + format_name: Incomplete + decoder: Incomplete + encoder: Incomplete + encoder_kwargs: Incomplete 
+ def __init__( + self, + cls: typing.Type, + type_args: typing.Tuple[typing.Type, ...] = ..., + dialect: typing.Optional[typing.Type[Dialect]] = ..., + first_method: str = ..., + allow_postponed_evaluation: bool = ..., + format_name: str = ..., + decoder: typing.Optional[typing.Any] = ..., + encoder: typing.Optional[typing.Any] = ..., + encoder_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = ..., + default_dialect: typing.Optional[typing.Type[Dialect]] = ..., + ) -> None: ... + def reset(self) -> None: ... + @property + def namespace(self) -> typing.Mapping[typing.Any, typing.Any]: ... + @property + def annotations(self) -> typing.Dict[str, typing.Any]: ... + def get_field_resolved_type_params( + self, field_name: str + ) -> typing.Dict[typing.Type, typing.Type]: ... + def get_field_types(self, include_extras: bool = ...) -> typing.Dict[str, typing.Any]: ... + @property + def dataclass_fields(self) -> typing.Dict[str, Field]: ... + @property + def metadatas(self) -> typing.Dict[str, typing.Mapping[str, typing.Any]]: ... + def get_field_default(self, name: str) -> typing.Any: ... + def add_type_modules(self, *types_: typing.Type) -> None: ... + def ensure_module_imported(self, module: types.ModuleType) -> None: ... + def ensure_object_imported( + self, obj: typing.Any, name: typing.Optional[str] = ... + ) -> None: ... + def add_line(self, line: str) -> None: ... + def indent(self, expr: typing.Optional[str] = ...) -> typing.Generator[None, None, None]: ... + def compile(self) -> None: ... + def get_declared_hook(self, method_name: str) -> typing.Any: ... + def add_unpack_method(self) -> None: ... + def get_config(self, cls: Incomplete | None = ..., look_in_parents: bool = ...): ... + def get_discriminator(self) -> typing.Optional[Discriminator]: ... + def get_pack_method_flags( + self, cls: typing.Optional[typing.Type] = ..., pass_encoder: bool = ... + ) -> str: ... + def get_unpack_method_flags( + self, cls: typing.Optional[typing.Type] = ..., pass_decoder: bool = ... + ) -> str: ... + def get_pack_method_default_flag_values( + self, cls: typing.Optional[typing.Type] = ..., pass_encoder: bool = ... + ) -> str: ... + def get_unpack_method_default_flag_values(self, pass_decoder: bool = ...) -> str: ... + def is_code_generation_option_enabled( + self, option: str, cls: typing.Optional[typing.Type] = ... + ) -> bool: ... + @classmethod + def get_unpack_method_name( + cls, + type_args: typing.Iterable = ..., + format_name: str = ..., + decoder: typing.Optional[typing.Any] = ..., + ) -> str: ... + @classmethod + def get_pack_method_name( + cls, + type_args: typing.Tuple[typing.Type, ...] = ..., + format_name: str = ..., + encoder: typing.Optional[typing.Any] = ..., + ) -> str: ... + def add_pack_method(self) -> None: ... + def iter_serialization_strategies( + self, metadata, ftype + ) -> Generator[Incomplete, None, None]: ... diff --git a/third-party-stubs/mashumaro/core/meta/code/lines.pyi b/third-party-stubs/mashumaro/core/meta/code/lines.pyi new file mode 100644 index 00000000..4d9bf503 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/code/lines.pyi @@ -0,0 +1,8 @@ +from typing import Generator, Optional + +class CodeLines: + def __init__(self) -> None: ... + def append(self, line: str) -> None: ... + def indent(self, expr: Optional[str] = ...) -> Generator[None, None, None]: ... + def as_text(self) -> str: ... + def reset(self) -> None: ... 
diff --git a/third-party-stubs/mashumaro/core/meta/helpers.pyi b/third-party-stubs/mashumaro/core/meta/helpers.pyi new file mode 100644 index 00000000..7ec3124e --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/helpers.pyi @@ -0,0 +1,58 @@ +import typing +from typing import Any, Dict, Optional, Sequence, Tuple, Type + +def get_type_origin(typ: Type) -> Type: ... +def get_generic_name(typ: Type, short: bool = ...) -> str: ... +def get_args(typ: Optional[Type]) -> Tuple[Type, ...]: ... +def get_literal_values(typ: Type) -> Tuple[Any, ...]: ... +def type_name( + typ: Optional[Type], + short: bool = ..., + resolved_type_params: Optional[Dict[Type, Type]] = ..., + is_type_origin: bool = ..., + none_type_as_none: bool = ..., +) -> str: ... +def is_special_typing_primitive(typ: Any) -> bool: ... +def is_generic(typ: Type) -> bool: ... +def is_typed_dict(typ: Type) -> bool: ... +def is_named_tuple(typ: Type) -> bool: ... +def is_new_type(typ: Type) -> bool: ... +def is_union(typ: Type) -> bool: ... +def is_optional(typ: Type, resolved_type_params: Optional[Dict[Type, Type]] = ...) -> bool: ... +def is_annotated(typ: Type) -> bool: ... +def get_type_annotations(typ: Type) -> Sequence[Any]: ... +def is_literal(typ: Type) -> bool: ... +def not_none_type_arg( + type_args: Tuple[Type, ...], resolved_type_params: Optional[Dict[Type, Type]] = ... +) -> Optional[Type]: ... +def is_type_var(typ: Type) -> bool: ... +def is_type_var_any(typ: Type) -> bool: ... +def is_class_var(typ: Type) -> bool: ... +def is_final(typ: Type) -> bool: ... +def is_init_var(typ: Type) -> bool: ... +def get_class_that_defines_method(method_name: str, cls: Type) -> Optional[Type]: ... +def get_class_that_defines_field(field_name: str, cls: Type) -> Optional[Type]: ... +def is_dataclass_dict_mixin(typ: Type) -> bool: ... +def is_dataclass_dict_mixin_subclass(typ: Type) -> bool: ... +def collect_type_params(typ: Type) -> Sequence[Type]: ... +def resolve_type_params( + typ: Type, type_args: Sequence[Type] = ..., include_bases: bool = ... +) -> Dict[Type, Dict[Type, Type]]: ... +def substitute_type_params(typ: Type, substitutions: Dict[Type, Type]) -> Type: ... +def get_name_error_name(e: NameError) -> str: ... +def is_dialect_subclass(typ: Type) -> bool: ... +def is_self(typ: Type) -> bool: ... +def is_required(typ: Type) -> bool: ... +def is_not_required(typ: Type) -> bool: ... +def get_function_arg_annotation( + function: typing.Callable[[Any], Any], + arg_name: typing.Optional[str] = ..., + arg_pos: typing.Optional[int] = ..., +) -> typing.Type: ... +def get_function_return_annotation( + function: typing.Callable[[typing.Any], typing.Any] +) -> typing.Type: ... +def is_unpack(typ: Type) -> bool: ... +def is_type_var_tuple(typ: Type) -> bool: ... +def hash_type_args(type_args: typing.Iterable[typing.Type]) -> str: ... +def iter_all_subclasses(cls) -> typing.Iterator[Type]: ... diff --git a/third-party-stubs/mashumaro/core/meta/mixin.pyi b/third-party-stubs/mashumaro/core/meta/mixin.pyi new file mode 100644 index 00000000..1d6734e0 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/mixin.pyi @@ -0,0 +1,13 @@ +from mashumaro.dialect import Dialect +from typing import Any, Dict, Optional, Tuple, Type + +def compile_mixin_packer( + cls, + format_name: str = ..., + dialect: Optional[Type[Dialect]] = ..., + encoder: Any = ..., + encoder_kwargs: Optional[Dict[str, Dict[str, Tuple[str, Any]]]] = ..., +) -> None: ... 
+def compile_mixin_unpacker( + cls, format_name: str = ..., dialect: Optional[Type[Dialect]] = ..., decoder: Any = ... +) -> None: ... diff --git a/third-party-stubs/mashumaro/core/meta/types/__init__.pyi b/third-party-stubs/mashumaro/core/meta/types/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/third-party-stubs/mashumaro/core/meta/types/common.pyi b/third-party-stubs/mashumaro/core/meta/types/common.pyi new file mode 100644 index 00000000..68ced553 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/types/common.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete +from functools import cached_property +from mashumaro.core.const import PEP_585_COMPATIBLE as PEP_585_COMPATIBLE +from mashumaro.core.meta.code.builder import CodeBuilder as CodeBuilder +from mashumaro.core.meta.helpers import ( + get_type_origin as get_type_origin, + is_annotated as is_annotated, + is_generic as is_generic, + type_name as type_name, +) +from mashumaro.exceptions import ( + UnserializableDataError as UnserializableDataError, + UnserializableField as UnserializableField, +) +from typing import Any, Dict, Mapping, Optional, Sequence, Type, TypeVar +from typing_extensions import TypeAlias + +cached_property = property +NoneType: Incomplete +Expression: TypeAlias +P: Incomplete +T = TypeVar("T") + +class ExpressionWrapper: + expression: Incomplete + def __init__(self, expression: str) -> None: ... + +PROPER_COLLECTION_TYPES: Dict[Type, str] + +class FieldContext: + name: str + metadata: Mapping + def copy(self, **changes: Any) -> FieldContext: ... + def __init__(self, name, metadata) -> None: ... + +class ValueSpec: + type: Type + origin_type: Type + expression: Expression + builder: CodeBuilder + field_ctx: FieldContext + could_be_none: bool + annotated_type: Optional[Type] + def __setattr__(self, key: str, value: Any) -> None: ... + def copy(self, **changes: Any) -> ValueSpec: ... + @cached_property + def annotations(self) -> Sequence[str]: ... + def __init__( + self, type, expression, builder, field_ctx, could_be_none, annotated_type + ) -> None: ... + +ValueSpecExprCreator: TypeAlias + +class Registry: + def register(self, function: ValueSpecExprCreator) -> ValueSpecExprCreator: ... + def get(self, spec: ValueSpec) -> Expression: ... + def __init__(self, _registry) -> None: ... + +def ensure_generic_collection(spec: ValueSpec) -> bool: ... +def ensure_collection_type_args_supported( + collection_type: Type, type_args: Sequence[Type] +) -> bool: ... +def ensure_generic_collection_subclass(spec: ValueSpec, *checked_types: Type) -> bool: ... +def ensure_generic_mapping(spec: ValueSpec, args: Sequence[Type], checked_type: Type) -> bool: ... +def expr_or_maybe_none(spec: ValueSpec, new_expr: Expression) -> Expression: ... +def random_hex() -> str: ... +def clean_id(value: str) -> str: ... 
diff --git a/third-party-stubs/mashumaro/core/meta/types/pack.pyi b/third-party-stubs/mashumaro/core/meta/types/pack.pyi
new file mode 100644
index 00000000..3231d887
--- /dev/null
+++ b/third-party-stubs/mashumaro/core/meta/types/pack.pyi
@@ -0,0 +1,3 @@
+from _typeshed import Incomplete
+
+PackerRegistry: Incomplete
diff --git a/third-party-stubs/mashumaro/core/meta/types/unpack.pyi b/third-party-stubs/mashumaro/core/meta/types/unpack.pyi
new file mode 100644
index 00000000..47020521
--- /dev/null
+++ b/third-party-stubs/mashumaro/core/meta/types/unpack.pyi
@@ -0,0 +1,34 @@
+import abc
+from _typeshed import Incomplete
+from abc import ABC, abstractmethod
+from mashumaro.core.meta.types.common import ValueSpec
+from mashumaro.types import Discriminator
+from typing import Optional, Tuple, Type
+
+UnpackerRegistry: Incomplete
+
+class AbstractUnpackerBuilder(ABC, metaclass=abc.ABCMeta):
+    @abstractmethod
+    def get_method_prefix(self) -> str: ...
+    def build(self, spec: ValueSpec) -> str: ...
+
+class UnionUnpackerBuilder(AbstractUnpackerBuilder):
+    union_args: Incomplete
+    def __init__(self, args: Tuple[Type, ...]) -> None: ...
+    def get_method_prefix(self) -> str: ...
+
+class TypeVarUnpackerBuilder(UnionUnpackerBuilder):
+    def get_method_prefix(self) -> str: ...
+
+class LiteralUnpackerBuilder(AbstractUnpackerBuilder):
+    def get_method_prefix(self) -> str: ...
+
+class DiscriminatedUnionUnpackerBuilder(AbstractUnpackerBuilder):
+    discriminator: Incomplete
+    base_variants: Incomplete
+    def __init__(
+        self, discriminator: Discriminator, base_variants: Optional[Tuple[Type, ...]] = ...
+    ) -> None: ...
+    def get_method_prefix(self) -> str: ...
+
+class SubtypeUnpackerBuilder(DiscriminatedUnionUnpackerBuilder): ...
diff --git a/third-party-stubs/mashumaro/dialect.pyi b/third-party-stubs/mashumaro/dialect.pyi
new file mode 100644
index 00000000..3f93bcfa
--- /dev/null
+++ b/third-party-stubs/mashumaro/dialect.pyi
@@ -0,0 +1,10 @@
+from mashumaro.core.const import Sentinel
+from mashumaro.types import SerializationStrategy
+from typing import Callable, Dict, Union
+from typing_extensions import Literal
+
+SerializationStrategyValueType = Union[SerializationStrategy, Dict[str, Union[str, Callable]]]
+
+class Dialect:
+    serialization_strategy: Dict[str, SerializationStrategyValueType]
+    omit_none: Union[bool, Literal[Sentinel.MISSING]]
diff --git a/third-party-stubs/mashumaro/exceptions.pyi b/third-party-stubs/mashumaro/exceptions.pyi
new file mode 100644
index 00000000..d4c536a2
--- /dev/null
+++ b/third-party-stubs/mashumaro/exceptions.pyi
@@ -0,0 +1,91 @@
+from _typeshed import Incomplete
+from mashumaro.core.meta.helpers import type_name as type_name
+from typing import Any, Optional, Type
+
+class MissingField(LookupError):
+    field_name: Incomplete
+    field_type: Incomplete
+    holder_class: Incomplete
+    def __init__(self, field_name: str, field_type: Type, holder_class: Type) -> None: ...
+    @property
+    def field_type_name(self) -> str: ...
+    @property
+    def holder_class_name(self) -> str: ...
+
+class UnserializableDataError(TypeError): ...
+
+class UnserializableField(UnserializableDataError):
+    field_name: Incomplete
+    field_type: Incomplete
+    holder_class: Incomplete
+    msg: Incomplete
+    def __init__(
+        self, field_name: str, field_type: Type, holder_class: Type, msg: Optional[str] = ...
+    ) -> None: ...
+    @property
+    def field_type_name(self) -> str: ...
+    @property
+    def holder_class_name(self) -> str: ...
+
+class UnsupportedSerializationEngine(UnserializableField):
+    def __init__(
+        self, field_name: str, field_type: Type, holder_class: Type, engine: Any
+    ) -> None: ...
+
+class UnsupportedDeserializationEngine(UnserializableField):
+    def __init__(
+        self, field_name: str, field_type: Type, holder_class: Type, engine: Any
+    ) -> None: ...
+
+class InvalidFieldValue(ValueError):
+    field_name: Incomplete
+    field_type: Incomplete
+    field_value: Incomplete
+    holder_class: Incomplete
+    msg: Incomplete
+    def __init__(
+        self,
+        field_name: str,
+        field_type: Type,
+        field_value: Any,
+        holder_class: Type,
+        msg: Optional[str] = ...,
+    ) -> None: ...
+    @property
+    def field_type_name(self) -> str: ...
+    @property
+    def holder_class_name(self) -> str: ...
+
+class MissingDiscriminatorError(LookupError):
+    field_name: Incomplete
+    def __init__(self, field_name: str) -> None: ...
+
+class SuitableVariantNotFoundError(ValueError):
+    variants_type: Incomplete
+    discriminator_name: Incomplete
+    discriminator_value: Incomplete
+    def __init__(
+        self,
+        variants_type: Type,
+        discriminator_name: Optional[str] = ...,
+        discriminator_value: Any = ...,
+    ) -> None: ...
+
+class BadHookSignature(TypeError): ...
+
+class ThirdPartyModuleNotFoundError(ModuleNotFoundError):
+    module_name: Incomplete
+    field_name: Incomplete
+    holder_class: Incomplete
+    def __init__(self, module_name: str, field_name: str, holder_class: Type) -> None: ...
+    @property
+    def holder_class_name(self) -> str: ...
+
+class UnresolvedTypeReferenceError(NameError):
+    holder_class: Incomplete
+    name: Incomplete
+    def __init__(self, holder_class: Type, unresolved_type_name: str) -> None: ...
+    @property
+    def holder_class_name(self) -> str: ...
+
+class BadDialect(ValueError): ...
diff --git a/third-party-stubs/mashumaro/helper.pyi b/third-party-stubs/mashumaro/helper.pyi
new file mode 100644
index 00000000..0eb8254d
--- /dev/null
+++ b/third-party-stubs/mashumaro/helper.pyi
@@ -0,0 +1,27 @@
+from mashumaro.types import SerializationStrategy
+from typing import Any, Callable, Dict, Optional, TypeVar, Union
+from typing_extensions import Literal
+
+NamedTupleDeserializationEngine = Literal["as_dict", "as_list"]
+DateTimeDeserializationEngine = Literal["ciso8601", "pendulum"]
+AnyDeserializationEngine = Literal[NamedTupleDeserializationEngine, DateTimeDeserializationEngine]
+
+NamedTupleSerializationEngine = Literal["as_dict", "as_list"]
+OmitSerializationEngine = Literal["omit"]
+AnySerializationEngine = Union[NamedTupleSerializationEngine, OmitSerializationEngine]
+
+T = TypeVar("T")
+
+def field_options(
+    serialize: Optional[Union[AnySerializationEngine, Callable[[Any], Any]]] = ...,
+    deserialize: Optional[Union[AnyDeserializationEngine, Callable[[Any], Any]]] = ...,
+    serialization_strategy: Optional[SerializationStrategy] = ...,
+    alias: Optional[str] = ...,
+) -> Dict[str, Any]: ...
+
+class _PassThrough(SerializationStrategy):
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+    def serialize(self, value: T) -> T: ...
+    def deserialize(self, value: T) -> T: ...
+ +pass_through: Any diff --git a/third-party-stubs/mashumaro/jsonschema/__init__.pyi b/third-party-stubs/mashumaro/jsonschema/__init__.pyi new file mode 100644 index 00000000..9c6436c6 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/__init__.pyi @@ -0,0 +1,2 @@ +from .builder import JSONSchemaBuilder as JSONSchemaBuilder, build_json_schema as build_json_schema +from .dialects import DRAFT_2020_12 as DRAFT_2020_12, OPEN_API_3_1 as OPEN_API_3_1 diff --git a/third-party-stubs/mashumaro/jsonschema/annotations.pyi b/third-party-stubs/mashumaro/jsonschema/annotations.pyi new file mode 100644 index 00000000..f39d8003 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/annotations.pyi @@ -0,0 +1,80 @@ +from mashumaro.jsonschema.models import JSONSchema, Number +from typing import Dict, Set + +class Annotation: ... +class Constraint(Annotation): ... +class NumberConstraint(Constraint): ... + +class Minimum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class Maximum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class ExclusiveMinimum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class ExclusiveMaximum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class MultipleOf(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class StringConstraint(Constraint): ... + +class MinLength(StringConstraint): + value: int + def __init__(self, value) -> None: ... + +class MaxLength(StringConstraint): + value: int + def __init__(self, value) -> None: ... + +class Pattern(StringConstraint): + value: str + def __init__(self, value) -> None: ... + +class ArrayConstraint(Constraint): ... + +class MinItems(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class MaxItems(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class UniqueItems(ArrayConstraint): + value: bool + def __init__(self, value) -> None: ... + +class Contains(ArrayConstraint): + value: JSONSchema + def __init__(self, value) -> None: ... + +class MinContains(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class MaxContains(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class ObjectConstraint(Constraint): ... + +class MaxProperties(ObjectConstraint): + value: int + def __init__(self, value) -> None: ... + +class MinProperties(ObjectConstraint): + value: int + def __init__(self, value) -> None: ... + +class DependentRequired(ObjectConstraint): + value: Dict[str, Set[str]] + def __init__(self, value) -> None: ... diff --git a/third-party-stubs/mashumaro/jsonschema/builder.pyi b/third-party-stubs/mashumaro/jsonschema/builder.pyi new file mode 100644 index 00000000..98bbc860 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/builder.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from mashumaro.jsonschema.dialects import JSONSchemaDialect +from mashumaro.jsonschema.models import Context, JSONSchema +from mashumaro.mixins.json import DataClassJSONMixin +from typing import Any, Dict, List, Optional, Type + +def build_json_schema( + instance_type: Type, + context: Optional[Context] = ..., + with_definitions: bool = ..., + all_refs: Optional[bool] = ..., + with_dialect_uri: bool = ..., + dialect: Optional[JSONSchemaDialect] = ..., + ref_prefix: Optional[str] = ..., +) -> JSONSchema: ... 
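The `build_json_schema` entry point stubbed just above is the public way in; the builder stub continues below. A minimal sketch of using it, assuming mashumaro is installed; the `User` dataclass is hypothetical and the emitted schema shape depends on the mashumaro version.

```python
# Hedged sketch: generating a JSON schema for a dataclass via the
# build_json_schema() function whose signature appears above.
from dataclasses import dataclass
from typing import Optional

from mashumaro.jsonschema import build_json_schema

@dataclass
class User:
    name: str
    age: Optional[int] = None

schema = build_json_schema(User)
# JSONSchema subclasses DataClassJSONMixin (see models.pyi below),
# so the result can be serialized directly.
print(schema.to_json())
```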
+ +class JSONSchemaDefinitions(DataClassJSONMixin): + definitions: Dict[str, JSONSchema] + def __post_serialize__(self, d: Dict[Any, Any]) -> List[Dict[str, Any]]: ... # type: ignore + def __init__(self, definitions) -> None: ... + +class JSONSchemaBuilder: + context: Incomplete + def __init__( + self, + dialect: JSONSchemaDialect = ..., + all_refs: Optional[bool] = ..., + ref_prefix: Optional[str] = ..., + ) -> None: ... + def build(self, instance_type: Type) -> JSONSchema: ... + def get_definitions(self) -> JSONSchemaDefinitions: ... diff --git a/third-party-stubs/mashumaro/jsonschema/dialects.pyi b/third-party-stubs/mashumaro/jsonschema/dialects.pyi new file mode 100644 index 00000000..88af0707 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/dialects.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +class JSONSchemaDialect: + uri: str + definitions_root_pointer: str + all_refs: bool + def __init__(self, uri, definitions_root_pointer, all_refs) -> None: ... + +class JSONSchemaDraft202012Dialect(JSONSchemaDialect): + uri: str + definitions_root_pointer: str + all_refs: bool + def __init__(self, uri, definitions_root_pointer, all_refs) -> None: ... + +class OpenAPISchema31Dialect(JSONSchemaDialect): + uri: str + definitions_root_pointer: str + all_refs: bool + def __init__(self, uri, definitions_root_pointer, all_refs) -> None: ... + +DRAFT_2020_12: Incomplete +OPEN_API_3_1: Incomplete diff --git a/third-party-stubs/mashumaro/jsonschema/models.pyi b/third-party-stubs/mashumaro/jsonschema/models.pyi new file mode 100644 index 00000000..b67db67b --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/models.pyi @@ -0,0 +1,243 @@ +from _typeshed import Incomplete +from enum import Enum +from mashumaro.config import BaseConfig as BaseConfig +from mashumaro.helper import pass_through as pass_through +from mashumaro.jsonschema.dialects import ( + DRAFT_2020_12 as DRAFT_2020_12, + JSONSchemaDialect as JSONSchemaDialect, +) +from mashumaro.mixins.json import DataClassJSONMixin as DataClassJSONMixin +from typing import Any, Dict, List, Optional, Set, Union +from typing_extensions import TypeAlias + +Number: TypeAlias = Union[int, float] +Null = object() + +class JSONSchemaInstanceType(Enum): + NULL: str + BOOLEAN: str + OBJECT: str + ARRAY: str + NUMBER: str + STRING: str + INTEGER: str + +class JSONSchemaInstanceFormat(Enum): ... 
+ +class JSONSchemaStringFormat(JSONSchemaInstanceFormat): + DATETIME: str + DATE: str + TIME: str + DURATION: str + EMAIL: str + IDN_EMAIL: str + HOSTNAME: str + IDN_HOSTNAME: str + IPV4ADDRESS: str + IPV6ADDRESS: str + URI: str + URI_REFERENCE: str + IRI: str + IRI_REFERENCE: str + UUID: str + URI_TEMPLATE: str + JSON_POINTER: str + RELATIVE_JSON_POINTER: str + REGEX: str + +class JSONSchemaInstanceFormatExtension(JSONSchemaInstanceFormat): + TIMEDELTA: str + TIME_ZONE: str + IPV4NETWORK: str + IPV6NETWORK: str + IPV4INTERFACE: str + IPV6INTERFACE: str + DECIMAL: str + FRACTION: str + BASE64: str + PATH: str + +DATETIME_FORMATS: Incomplete +IPADDRESS_FORMATS: Incomplete + +class JSONSchema(DataClassJSONMixin): + schema: Optional[str] + type: Optional[JSONSchemaInstanceType] + enum: Optional[List[Any]] + const: Optional[Any] + format: Optional[ + Union[JSONSchemaInstanceFormat, JSONSchemaStringFormat, JSONSchemaInstanceFormatExtension] + ] + title: Optional[str] + description: Optional[str] + anyOf: Optional[List["JSONSchema"]] + reference: Optional[str] + definitions: Optional[Dict[str, "JSONSchema"]] + default: Optional[Any] + deprecated: Optional[bool] + examples: Optional[List[Any]] + properties: Optional[Dict[str, "JSONSchema"]] + patternProperties: Optional[Dict[str, "JSONSchema"]] + additionalProperties: Union["JSONSchema", bool, None] + propertyNames: Optional["JSONSchema"] + prefixItems: Optional[List["JSONSchema"]] + items: Optional["JSONSchema"] + contains: Optional["JSONSchema"] + multipleOf: Optional[Number] + maximum: Optional[Number] + exclusiveMaximum: Optional[Number] + minimum: Optional[Number] + exclusiveMinimum: Optional[Number] + maxLength: Optional[int] + minLength: Optional[int] + pattern: Optional[str] + maxItems: Optional[int] + minItems: Optional[int] + uniqueItems: Optional[bool] + maxContains: Optional[int] + minContains: Optional[int] + maxProperties: Optional[int] + minProperties: Optional[int] + required: Optional[List[str]] + dependentRequired: Optional[Dict[str, Set[str]]] + + class Config(BaseConfig): + omit_none: bool + serialize_by_alias: bool + aliases: Incomplete + serialization_strategy: Incomplete + def __pre_serialize__(self) -> JSONSchema: ... + def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]: ... + def __init__( + self, + schema, + type, + enum, + const, + format, + title, + description, + anyOf, + reference, + definitions, + default, + deprecated, + examples, + properties, + patternProperties, + additionalProperties, + propertyNames, + prefixItems, + items, + contains, + multipleOf, + maximum, + exclusiveMaximum, + minimum, + exclusiveMinimum, + maxLength, + minLength, + pattern, + maxItems, + minItems, + uniqueItems, + maxContains, + minContains, + maxProperties, + minProperties, + required, + dependentRequired, + ) -> None: ... + +class JSONObjectSchema(JSONSchema): + type: JSONSchemaInstanceType + def __init__( + self, + schema, + type, + enum, + const, + format, + title, + description, + anyOf, + reference, + definitions, + default, + deprecated, + examples, + properties, + patternProperties, + additionalProperties, + propertyNames, + prefixItems, + items, + contains, + multipleOf, + maximum, + exclusiveMaximum, + minimum, + exclusiveMinimum, + maxLength, + minLength, + pattern, + maxItems, + minItems, + uniqueItems, + maxContains, + minContains, + maxProperties, + minProperties, + required, + dependentRequired, + ) -> None: ... 
+
+class JSONArraySchema(JSONSchema):
+    type: JSONSchemaInstanceType
+    def __init__(
+        self,
+        schema,
+        type,
+        enum,
+        const,
+        format,
+        title,
+        description,
+        anyOf,
+        reference,
+        definitions,
+        default,
+        deprecated,
+        examples,
+        properties,
+        patternProperties,
+        additionalProperties,
+        propertyNames,
+        prefixItems,
+        items,
+        contains,
+        multipleOf,
+        maximum,
+        exclusiveMaximum,
+        minimum,
+        exclusiveMinimum,
+        maxLength,
+        minLength,
+        pattern,
+        maxItems,
+        minItems,
+        uniqueItems,
+        maxContains,
+        minContains,
+        maxProperties,
+        minProperties,
+        required,
+        dependentRequired,
+    ) -> None: ...
+
+class Context:
+    dialect: JSONSchemaDialect
+    definitions: Dict[str, JSONSchema]
+    all_refs: Optional[bool]
+    ref_prefix: Optional[str]
+    def __init__(self, dialect, definitions, all_refs, ref_prefix) -> None: ...
diff --git a/third-party-stubs/mashumaro/jsonschema/schema.pyi b/third-party-stubs/mashumaro/jsonschema/schema.pyi
new file mode 100644
index 00000000..e3cd1b5c
--- /dev/null
+++ b/third-party-stubs/mashumaro/jsonschema/schema.pyi
@@ -0,0 +1,75 @@
+from mashumaro.config import BaseConfig
+from mashumaro.jsonschema.annotations import Annotation
+from mashumaro.jsonschema.models import Context, JSONSchema
+from typing import Any, Callable, Iterable, List, Mapping, Optional, Tuple, Type, Union
+from typing_extensions import TypeAlias
+
+InstanceSchemaCreator: TypeAlias
+
+class Instance:
+    type: Type
+    name: Optional[str]
+    origin_type: Type
+    annotations: List[Annotation]
+    @property
+    def metadata(self) -> Mapping[str, Any]: ...
+    @property
+    def alias(self) -> Optional[str]: ...
+    @property
+    def holder_class(self) -> Optional[Type]: ...
+    def copy(self, **changes: Any) -> Instance: ...
+    def __post_init__(self) -> None: ...
+    def update_type(self, new_type: Type) -> None: ...
+    def fields(self) -> Iterable[Tuple[str, Type, bool, Any]]: ...
+    def get_overridden_serialization_method(self) -> Optional[Union[Callable, str]]: ...
+    def get_config(self) -> Type[BaseConfig]: ...
+    def __init__(self, type, name, __builder) -> None: ...
+
+class InstanceSchemaCreatorRegistry:
+    def register(self, func: InstanceSchemaCreator) -> InstanceSchemaCreator: ...
+    def iter(self) -> Iterable[InstanceSchemaCreator]: ...
+    def __init__(self, _registry) -> None: ...
+
+class EmptyJSONSchema(JSONSchema):
+    def __init__(
+        self,
+        schema,
+        type,
+        enum,
+        const,
+        format,
+        title,
+        description,
+        anyOf,
+        reference,
+        definitions,
+        default,
+        deprecated,
+        examples,
+        properties,
+        patternProperties,
+        additionalProperties,
+        propertyNames,
+        prefixItems,
+        items,
+        contains,
+        multipleOf,
+        maximum,
+        exclusiveMaximum,
+        minimum,
+        exclusiveMinimum,
+        maxLength,
+        minLength,
+        pattern,
+        maxItems,
+        minItems,
+        uniqueItems,
+        maxContains,
+        minContains,
+        maxProperties,
+        minProperties,
+        required,
+        dependentRequired,
+    ) -> None: ...
+
+def get_schema(instance: Instance, ctx: Context, with_dialect_uri: bool = ...) -> JSONSchema: ...
diff --git a/third-party-stubs/mashumaro/mixins/__init__.pyi b/third-party-stubs/mashumaro/mixins/__init__.pyi
new file mode 100644
index 00000000..e69de29b
diff --git a/third-party-stubs/mashumaro/mixins/dict.pyi b/third-party-stubs/mashumaro/mixins/dict.pyi
new file mode 100644
index 00000000..87728396
--- /dev/null
+++ b/third-party-stubs/mashumaro/mixins/dict.pyi
@@ -0,0 +1,15 @@
+from typing import Any, Dict, Mapping, Type, TypeVar
+
+T = TypeVar("T", bound="DataClassDictMixin")
+
+class DataClassDictMixin:
+    def __init_subclass__(cls: Type[T], **kwargs: Any) -> None: ...
+    def to_dict(self, **kwargs: Any) -> dict: ...
+    @classmethod
+    def from_dict(cls: Type[T], d: Mapping, **kwargs: Any) -> T: ...
+    @classmethod
+    def __pre_deserialize__(cls: Type[T], d: Dict[Any, Any]) -> Dict[Any, Any]: ...
+    @classmethod
+    def __post_deserialize__(cls: Type[T], obj: T) -> T: ...
+    def __pre_serialize__(self: T) -> T: ...
+    def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]: ...
diff --git a/third-party-stubs/mashumaro/mixins/json.pyi b/third-party-stubs/mashumaro/mixins/json.pyi
new file mode 100644
index 00000000..267c277e
--- /dev/null
+++ b/third-party-stubs/mashumaro/mixins/json.pyi
@@ -0,0 +1,18 @@
+from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin
+from typing import Any, Callable, Dict, TypeVar, Union, Type
+
+EncodedData = Union[str, bytes, bytearray]
+T = TypeVar("T", bound="DataClassJSONMixin")
+
+class Encoder:
+    def __call__(self, obj: Any, **kwargs: Any) -> EncodedData: ...
+
+class Decoder:
+    def __call__(self, s: EncodedData, **kwargs: Any) -> Dict[Any, Any]: ...
+
+class DataClassJSONMixin(DataClassDictMixin):
+    def to_json(self, encoder: Encoder = ..., **to_dict_kwargs: Any) -> EncodedData: ...
+    @classmethod
+    def from_json(
+        cls: Type[T], data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any
+    ) -> T: ...
diff --git a/third-party-stubs/mashumaro/mixins/msgpack.pyi b/third-party-stubs/mashumaro/mixins/msgpack.pyi
new file mode 100644
index 00000000..d1467bf4
--- /dev/null
+++ b/third-party-stubs/mashumaro/mixins/msgpack.pyi
@@ -0,0 +1,23 @@
+from _typeshed import Incomplete
+from mashumaro.dialect import Dialect as Dialect
+from mashumaro.helper import pass_through as pass_through
+from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin
+from typing import Any, Callable, Dict, TypeVar, Type
+
+T = TypeVar("T", bound="DataClassMessagePackMixin")
+EncodedData = bytes
+Encoder = Callable[[Any], EncodedData]
+Decoder = Callable[[EncodedData], Dict[Any, Any]]
+
+class MessagePackDialect(Dialect):
+    serialization_strategy: Incomplete
+
+def default_encoder(data: Any) -> EncodedData: ...
+def default_decoder(data: EncodedData) -> Dict[Any, Any]: ...
+
+class DataClassMessagePackMixin(DataClassDictMixin):
+    def to_msgpack(self, encoder: Encoder = ..., **to_dict_kwargs: Any) -> EncodedData: ...
+    @classmethod
+    def from_msgpack(
+        cls: Type[T], data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any
+    ) -> T: ...
diff --git a/third-party-stubs/mashumaro/mixins/orjson.pyi b/third-party-stubs/mashumaro/mixins/orjson.pyi
new file mode 100644
index 00000000..d56f063e
--- /dev/null
+++ b/third-party-stubs/mashumaro/mixins/orjson.pyi
@@ -0,0 +1,23 @@
+from mashumaro.dialect import Dialect as Dialect
+from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin
+from typing import Any, Callable, Dict, Type, TypeVar, Union
+
+T = TypeVar("T", bound="DataClassORJSONMixin")
+EncodedData = Union[str, bytes, bytearray]
+Encoder = Callable[[Any], EncodedData]
+Decoder = Callable[[EncodedData], Dict[Any, Any]]
+
+class OrjsonDialect(Dialect):
+    serialization_strategy: Any
+
+class DataClassORJSONMixin(DataClassDictMixin):
+    def to_jsonb(
+        self, encoder: Encoder = ..., *, orjson_options: int = ..., **to_dict_kwargs: Any
+    ) -> bytes: ...
+    def to_json(
+        self, encoder: Encoder = ..., *, orjson_options: int = ..., **to_dict_kwargs: Any
+    ) -> bytes: ...
+    @classmethod
+    def from_json(
+        cls: Type[T], data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any
+    ) -> T: ...
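With the dict/JSON mixins stubbed above, a round trip through their API looks roughly like this. A minimal sketch, assuming mashumaro is installed; `Point` is illustrative, not part of the diff.

```python
# Hedged sketch: dict/JSON round trip via the stubbed mixin API, plus the
# MissingField error from exceptions.pyi when a required field is absent.
from dataclasses import dataclass

from mashumaro.exceptions import MissingField
from mashumaro.mixins.json import DataClassJSONMixin

@dataclass
class Point(DataClassJSONMixin):
    x: int
    y: int

p = Point(1, 2)
assert Point.from_dict(p.to_dict()) == p
assert Point.from_json(p.to_json()) == p

try:
    Point.from_dict({"x": 1})  # "y" is required but absent
except MissingField as e:
    print(e.holder_class_name)  # expected: "Point"
```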
diff --git a/third-party-stubs/mashumaro/mixins/toml.pyi b/third-party-stubs/mashumaro/mixins/toml.pyi
new file mode 100644
index 00000000..bb56adee
--- /dev/null
+++ b/third-party-stubs/mashumaro/mixins/toml.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+from mashumaro.dialect import Dialect as Dialect
+from mashumaro.helper import pass_through as pass_through
+from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin
+from typing import Any, Callable, Dict, Type, TypeVar
+
+T = TypeVar("T", bound="DataClassTOMLMixin")
+EncodedData = str
+Encoder = Callable[[Any], EncodedData]
+Decoder = Callable[[EncodedData], Dict[Any, Any]]
+
+class TOMLDialect(Dialect):
+    omit_none: bool
+    serialization_strategy: Incomplete
+
+class DataClassTOMLMixin(DataClassDictMixin):
+    def to_toml(self, encoder: Encoder = ..., **to_dict_kwargs: Any) -> EncodedData: ...
+    @classmethod
+    def from_toml(
+        cls: Type[T], data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any
+    ) -> T: ...
diff --git a/third-party-stubs/mashumaro/mixins/yaml.pyi b/third-party-stubs/mashumaro/mixins/yaml.pyi
new file mode 100644
index 00000000..25e3571b
--- /dev/null
+++ b/third-party-stubs/mashumaro/mixins/yaml.pyi
@@ -0,0 +1,20 @@
+from _typeshed import Incomplete
+from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin
+from typing import Any, Callable, Dict, Type, TypeVar, Union
+
+T = TypeVar("T", bound="DataClassYAMLMixin")
+EncodedData = Union[str, bytes]
+Encoder = Callable[[Any], EncodedData]
+Decoder = Callable[[EncodedData], Dict[Any, Any]]
+DefaultLoader: Incomplete
+DefaultDumper: Incomplete
+
+def default_encoder(data: Any) -> EncodedData: ...
+def default_decoder(data: EncodedData) -> Dict[Any, Any]: ...
+
+class DataClassYAMLMixin(DataClassDictMixin):
+    def to_yaml(self, encoder: Encoder = ..., **to_dict_kwargs: Any) -> EncodedData: ...
+    @classmethod
+    def from_yaml(
+        cls: Type[T], data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any
+    ) -> T: ...
diff --git a/third-party-stubs/mashumaro/types.pyi b/third-party-stubs/mashumaro/types.pyi
new file mode 100644
index 00000000..536c5d2c
--- /dev/null
+++ b/third-party-stubs/mashumaro/types.pyi
@@ -0,0 +1,26 @@
+import decimal
+from _typeshed import Incomplete
+from mashumaro.core.const import Sentinel
+from typing import Any, Optional, Union
+from typing_extensions import Literal
+
+class SerializableType: ...
+class GenericSerializableType: ...
+
+class SerializationStrategy:
+    def serialize(self, value: Any) -> Any: ...
+    def deserialize(self, value: Any) -> Any: ...
+
+class RoundedDecimal(SerializationStrategy):
+    exp: Incomplete
+    rounding: Incomplete
+    def __init__(self, places: Optional[int] = ..., rounding: Optional[str] = ...) -> None: ...
+    def serialize(self, value: decimal.Decimal) -> str: ...
+    def deserialize(self, value: str) -> decimal.Decimal: ...
+
+class Discriminator:
+    field: Optional[str]
+    include_supertypes: bool
+    include_subtypes: bool
+    def __post_init__(self) -> None: ...
+    def __init__(self, field, include_supertypes, include_subtypes) -> None: ...
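Closing out the stub set, types.pyi declares the `SerializationStrategy` surface. A minimal sketch of wiring the stubbed `RoundedDecimal` strategy into a field via `field_options` (from helper.pyi above); the `Invoice` dataclass is illustrative and the exact output string depends on the configured rounding mode.

```python
# Hedged sketch: attaching the stubbed RoundedDecimal strategy to a field.
from dataclasses import dataclass, field
from decimal import Decimal

from mashumaro.helper import field_options
from mashumaro.mixins.dict import DataClassDictMixin
from mashumaro.types import RoundedDecimal

@dataclass
class Invoice(DataClassDictMixin):
    # field_options() returns a plain metadata dict, per its stubbed signature.
    total: Decimal = field(
        metadata=field_options(serialization_strategy=RoundedDecimal(places=2))
    )

print(Invoice(total=Decimal("19.995")).to_dict())  # e.g. {'total': '20.00'}
```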